'use strict';

// get the total length, number of lines, and length of the last line of a string
const get_offsets = str => {
	const { length } = str;
	let lines = 1;
	let last = 0;
	for (let i = 0; i < length; i++) {
		if (str[i] === '\n') {
			lines++;
			last = 0;
		} else {
			last++;
		}
	}
	return { length, lines, last };
};

// dedent a script block, and get offsets necessary to later adjust linting messages about the block
const dedent_code = str => {
	// the leading whitespace of the first non-blank line is taken as the block's indentation
	let indentation = '';
	for (let i = 0; i < str.length; i++) {
		const char = str[i];
		if (char === '\n' || char === '\r') {
			indentation = '';
		} else if (char === ' ' || char === '\t') {
			indentation += str[i];
		} else {
			break;
		}
	}
	const { length } = indentation;
	let dedented = '';
	// offsets: characters removed from each line; total_offsets: running sum, indexed by line
	const offsets = [];
	const total_offsets = [0];
	for (let i = 0; i < str.length; i++) {
		if (i === 0 || str[i - 1] === '\n') {
			// at a line start: skip the indentation when present, and record how much was removed
			if (str.slice(i, i + length) === indentation) {
				i += length;
				offsets.push(length);
			} else {
				offsets.push(0);
			}
			total_offsets.push(total_offsets[total_offsets.length - 1] + offsets[offsets.length - 1]);
			if (i >= str.length) {
				break;
			}
		}
		dedented += str[i];
	}
	return { dedented, offsets: { offsets, total_offsets } };
};

// get character offsets of each line in a string
const get_line_offsets = str => {
	const offsets = [-1];
	for (let i = 0; i < str.length; i++) {
		if (str[i] === '\n') {
			offsets.push(i);
		}
	}
	return offsets;
};

// return a new block
const new_block = () => ({ transformed_code: '', line_offsets: null, translations: new Map() });

// get translation info and include the processed scripts in this block's transformed_code
const get_translation = (text, block, node, options = {}) => {
	block.transformed_code += '\n';
	const translation = { options, unoffsets: get_offsets(block.transformed_code) };
	translation.range = [node.start, node.end];
	const { dedented, offsets } = dedent_code(text.slice(node.start, node.end));
	block.transformed_code += dedented;
	translation.offsets = get_offsets(text.slice(0, node.start));
	translation.dedent = offsets;
	translation.end = get_offsets(block.transformed_code).lines;
	// map every line of the appended script back to this translation record
	for (let i = translation.unoffsets.lines; i <= translation.end; i++) {
		block.translations.set(i, translation);
	}
	block.transformed_code += '\n';
};

// options harvested from ESLint settings on each verify() call (see patch below)
const processor_options = {};

// find Linter instance
const linter_path = Object.keys(require.cache).find(path => path.endsWith('/eslint/lib/linter/linter.js') || path.endsWith('\\eslint\\lib\\linter\\linter.js'));
if (!linter_path) {
	throw new Error('Could not find ESLint Linter in require cache');
}
const { Linter } = require(linter_path);

// patch Linter#verify
const { verify } = Linter.prototype;
Linter.prototype.verify = function(code, config, options) {
	// fetch settings
	const settings = config && (typeof config.extractConfig === 'function' ? config.extractConfig(options.filename) : config).settings || {};
	processor_options.custom_compiler = settings['svelte3/compiler'];
	processor_options.ignore_warnings = settings['svelte3/ignore-warnings'];
	processor_options.ignore_styles = settings['svelte3/ignore-styles'];
	processor_options.compiler_options = settings['svelte3/compiler-options'];
	processor_options.named_blocks = settings['svelte3/named-blocks'];
	processor_options.typescript = settings['svelte3/typescript'];
	// call original Linter#verify
	return verify.call(this, code, config, options);
};

// mutable state shared between the preprocess and postprocess passes
let state;
const reset = () => {
	state = {
		messages: null,
		var_names: null,
		blocks: new Map(),
	};
};
reset();

// base64 VLQ decoding tables for source-map "mappings" strings
var charToInteger = {};
var chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
for (var i = 0; i < chars.length; i++) {
	charToInteger[chars.charCodeAt(i)] = i;
}

// decode a source-map "mappings" string into an array (per line) of segment arrays
function decode(mappings) {
	var decoded = [];
	var line = [];
	var segment = [
		0,
		0,
		0,
		0,
		0,
	];
	var j = 0;
	for (var i = 0, shift = 0, value = 0; i < mappings.length; i++) {
		var c = mappings.charCodeAt(i);
		if (c === 44) { // ","
			segmentify(line, segment, j);
			j = 0;
		} else if (c === 59) { // ";"
			segmentify(line, segment, j);
			j = 0;
			decoded.push(line);
			line = [];
			// only the generated-column field resets at a new line; the rest stay relative
			segment[0] = 0;
		} else {
			var integer = charToInteger[c];
			if (integer === undefined) {
				throw new Error('Invalid character (' + String.fromCharCode(c) + ')');
			}
			// bit 5 marks a continuation; the low 5 bits carry the payload
			var hasContinuationBit = integer & 32;
			integer &= 31;
			value += integer << shift;
			if (hasContinuationBit) {
				shift += 5;
			} else {
				// lowest bit of the decoded value is the sign
				var shouldNegate = value & 1;
				value >>>= 1;
				if (shouldNegate) {
					value = value === 0 ? -0x80000000 : -value;
				}
				segment[j] += value;
				j++;
				value = shift = 0; // reset
			}
		}
	}
	segmentify(line, segment, j);
	decoded.push(line);
	return decoded;
}

function segmentify(line, segment, j) {
	// This looks ugly, but we're creating specialized arrays with a specific
	// length. This is much faster than creating a new array (which v8 expands to
	// a capacity of 17 after pushing the first item), or slicing out a subarray
	// (which is slow). Length 4 is assumed to be the most frequent, followed by
	// length 5 (since not everything will have an associated name), followed by
	// length 1 (it's probably rare for a source substring to not have an
	// associated segment data).
	if (j === 4) line.push([segment[0], segment[1], segment[2], segment[3]]);
	else if (j === 5) line.push([segment[0], segment[1], segment[2], segment[3], segment[4]]);
	else if (j === 1) line.push([segment[0]]);
}

// maps a position in the whole generated file to a position inside one transpiled fragment
class GeneratedFragmentMapper {
	constructor(generated_code, diff) {
		this.generated_code = generated_code;
		this.diff = diff;
	}

	get_position_relative_to_fragment(position_relative_to_file) {
		const fragment_offset = this.offset_in_fragment(offset_at(position_relative_to_file, this.generated_code));
		return position_at(fragment_offset, this.diff.generated_content);
	}

	offset_in_fragment(offset) {
		return offset - this.diff.generated_start
	}
}

// maps a position inside one original fragment back to a position in the whole original file
class OriginalFragmentMapper {
	constructor(original_code, diff) {
		this.original_code = original_code;
		this.diff = diff;
	}

	get_position_relative_to_file(position_relative_to_fragment) {
		const parent_offset = this.offset_in_parent(offset_at(position_relative_to_fragment, this.diff.original_content));
		return position_at(parent_offset, this.original_code);
	}

	offset_in_parent(offset) {
		return this.diff.original_start + offset;
	}
}

// resolves generated positions to original positions through a raw source map (decoded lazily)
class SourceMapper {
	constructor(raw_source_map) {
		this.raw_source_map = raw_source_map;
	}

	get_original_position(generated_position) {
		if (generated_position.line < 0) {
			return { line: -1, column: -1 };
		}

		// Lazy-load
		if (!this.decoded) {
			this.decoded = decode(JSON.parse(this.raw_source_map).mappings);
		}

		let line = generated_position.line;
		let column = generated_position.column;

		let line_match = this.decoded[line];
		while (line >= 0 && (!line_match || !line_match.length)) {
			// walk backwards to the nearest preceding line that has any mappings,
			// and use that line's last segment
			line -= 1;
			line_match = this.decoded[line];
			if (line_match && line_match.length) {
				return {
					line: line_match[line_match.length - 1][2],
					column: line_match[line_match.length - 1][3]
				};
			}
		}

		if (line < 0) {
			return { line: -1, column: -1 };
		}

		// pick the last segment whose start column covers the requested column
		const column_match = line_match.find((col, idx) =>
			idx + 1 === line_match.length ||
			(col[0] <= column && line_match[idx + 1][0] > column)
		);

		return {
			line: column_match[2],
			column: column_match[3],
		};
	}
}

// maps positions in the whole generated file back to the whole original file, across all fragments
class DocumentMapper {
	constructor(original_code, generated_code, diffs) {
		this.original_code = original_code;
		this.generated_code = generated_code;
		this.diffs = diffs;
		this.mappers = diffs.map(diff => {
			return {
				start: diff.generated_start,
				end: diff.generated_end,
				diff: diff.diff,
				generated_fragment_mapper: new GeneratedFragmentMapper(generated_code, diff),
				source_mapper: new SourceMapper(diff.map),
				original_fragment_mapper: new OriginalFragmentMapper(original_code, diff)
			}
		});
	}

	get_original_position(generated_position) {
		// incoming position is 1-based (ESLint); convert to 0-based line
		generated_position = { line: generated_position.line - 1, column: generated_position.column };
		const offset = offset_at(generated_position, this.generated_code);
		let original_offset = offset;
		for (const mapper of this.mappers) {
			if (offset >= mapper.start && offset <= mapper.end) {
				return this.map(mapper, generated_position);
			}
			if (offset > mapper.end) {
				// past this fragment: compensate for its generated-vs-original size difference
				original_offset -= mapper.diff;
			}
		}
		const original_position = position_at(original_offset, this.original_code);
		return this.to_ESLint_position(original_position);
	}

	map(mapper, generated_position) {
		// Map the position to be relative to the transpiled fragment
		const position_in_transpiled_fragment = mapper.generated_fragment_mapper.get_position_relative_to_fragment(
			generated_position
		);
		// Map the position, using the sourcemap, to the original position in the source fragment
		const position_in_original_fragment = mapper.source_mapper.get_original_position(
			position_in_transpiled_fragment
		);
		// Map the position to be in the original fragment's parent
		const original_position = mapper.original_fragment_mapper.get_position_relative_to_file(position_in_original_fragment);
		return this.to_ESLint_position(original_position);
	}

	to_ESLint_position(position) {
		// ESLint line/column is 1-based
		return { line: position.line + 1, column: position.column + 1 };
	}
}

/**
 * Get the offset of the line and character position
 * @param position Line and character position
 * @param text The text for which the offset
/**
 * Get the character offset of a zero-based line/column position within `text`.
 * A line past the end maps to `text.length`; a negative line maps to 0, and the
 * column is clamped so the offset never spills past the end of the line.
 * @param position zero-based line and character position
 * @param text the text for which the offset should be retrieved
 */
function offset_at(position, text) {
	const starts = get_line_offsets$1(text);
	if (position.line >= starts.length) {
		return text.length;
	}
	if (position.line < 0) {
		return 0;
	}
	const line_start = starts[position.line];
	const has_next_line = position.line + 1 < starts.length;
	const line_end = has_next_line ? starts[position.line + 1] : text.length;
	return clamp(line_end, line_start, line_start + position.column);
}

/**
 * Get the zero-based line/column position of a character offset within `text`.
 * Offsets outside the text are clamped into range first.
 * @param offset character offset
 * @param text the text the offset refers to
 */
function position_at(offset, text) {
	offset = clamp(offset, 0, text.length);
	const starts = get_line_offsets$1(text);
	if (starts.length === 0) {
		return { line: 0, column: offset };
	}
	// binary search for the first line whose start offset lies beyond `offset`
	let lo = 0;
	let hi = starts.length;
	while (lo < hi) {
		const mid = Math.floor((lo + hi) / 2);
		if (starts[mid] > offset) {
			hi = mid;
		} else {
			lo = mid + 1;
		}
	}
	// `lo` is now one past the line containing the offset (or starts.length if none is beyond it)
	const line = lo - 1;
	return { line, column: offset - starts[line] };
}

// character offsets at which each line of `text` begins; '\r\n' counts as a single break,
// and a trailing line break contributes one final (empty) line
function get_line_offsets$1(text) {
	const offsets = [];
	let line_pending = true;
	let i = 0;
	while (i < text.length) {
		if (line_pending) {
			offsets.push(i);
			line_pending = false;
		}
		const ch = text.charAt(i);
		line_pending = ch === '\r' || ch === '\n';
		if (ch === '\r' && i + 1 < text.length && text.charAt(i + 1) === '\n') {
			i += 1;
		}
		i += 1;
	}
	if (line_pending && text.length > 0) {
		offsets.push(text.length);
	}
	return offsets;
}

// restrict `num` to the range [min, max] (min wins if the bounds are inverted)
function clamp(num, min, max) {
	const capped = Math.min(max, num);
	return Math.max(min, capped);
}

// lazily-required fallback Svelte compiler (assigned elsewhere on first use)
let default_compiler;

// find the contextual name or names described by a particular node in the AST
const contextual_names = [];
const find_contextual_names = (compiler, node) => {
	if (!node) {
		return;
	}
	if (typeof node === 'string') {
		contextual_names.push(node);
	} else if (typeof node === 'object') {
		compiler.walk(node, {
			enter(node, parent, prop) {
				// nodes reached through a `key` property name object keys, not contextual variables
				if (node.name && prop !== 'key') {
					contextual_names.push(node.name);
				}
			},
		});
	}
};
// extract scripts to lint from component definition const preprocess = text => { const compiler = processor_options.custom_compiler || default_compiler || (default_compiler = require('svelte/compiler')); if (processor_options.ignore_styles) { // wipe the appropriate