/* xlsx.js (C) 2013-present SheetJS -- http://sheetjs.com */
/* vim: set ts=2: */
/*jshint mocha:true */
/* eslint-env mocha */
/*global process, document, require */
/*global ArrayBuffer, Uint8Array */
/*::
declare type EmptyFunc = (() => void) | null;
declare var afterEach:(test:EmptyFunc)=>void;
declare var cptable: any;
*/
import * as assert_ from 'https://deno.land/std/testing/asserts.ts';
import * as base64_ from 'https://deno.land/std/encoding/base64.ts';
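/* Node-style assert shim: map the assert API used throughout these tests onto Deno's std asserts */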
const assert: any = {...assert_};
assert.throws = function(f: () => void) { assert.assertThrows(function() { try { f(); } catch(e) { throw e instanceof Error ? e : new Error(e); }})};
assert.doesNotThrow = function(f: ()=>void) { f(); };
assert.equal = assert.assertEquals;
assert.notEqual = assert.assertNotEquals;
assert.deepEqual = (x: any,y: any, z?: string) => assert.equal(JSON.stringify(x), JSON.stringify(y), z);

// @deno-types="./types/index.d.ts"
import * as X from './xlsx.mjs';
import * as cptable from './dist/cpexcel.full.mjs';
X.set_cptable(cptable);
import XLSX_ZAHL from './dist/xlsx.zahl.mjs';
var DIF_XL = true;

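/* Node-style fs shim: emulate the subset of fs used here (readFileSync with encodings, existsSync, readdirSync) on top of Deno APIs */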
type BSEncoding = 'utf-8' | 'binary' | 'base64';
type BFEncoding = 'buffer';
type ShEncoding = BSEncoding | BFEncoding;
function readFileSync2(x: string): Uint8Array;
function readFileSync2(x: string, e: BSEncoding): string;
function readFileSync2(x: string, e: BFEncoding): Uint8Array;
function readFileSync2(x: string, e?: ShEncoding): Uint8Array | string {
const u8 = Deno.readFileSync(x);
if(!e) return u8;
switch(e) {
case 'utf-8': return new TextDecoder().decode(u8);
case 'base64': return base64_.encode(u8);
case 'buffer': return u8;
case 'binary': return Array.from({length: u8.length}, (_,i) => String.fromCharCode(u8[i])).join("");
}
throw new Error(`unsupported encoding ${e}`);
}
var fs = {
readFileSync: readFileSync2,
existsSync: (x: string): boolean => { try { Deno.readFileSync(x); } catch(e) { return false; } return true; },
readdirSync: (x: string) => ([...Deno.readDirSync(x)].filter(x => x.isFile).map(x => x.name))
};

var browser = false;

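/* Buffer shim: minimal Buffer.from replacement backed by TextEncoder / Uint8Array */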
var Buffer_from = function(data: any, enc?: string): Uint8Array {
if(data instanceof Uint8Array) return data;
if((!enc || enc == "utf8") && typeof data == "string") return new TextEncoder().encode(data);
if(enc == "binary" && typeof data == "string") return Uint8Array.from({length: data.length}, (_,i) => data[i].charCodeAt(0));
throw `unsupported encoding ${enc}`;
}

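/* Shared test configuration: parse options, the formats exercised in round-trip tests, and the test file manifest */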
var opts: any = {cellNF: true};
var TYPE: 'buffer' = "buffer";
opts.type = TYPE;
var fullex = [".xlsb", /*".xlsm",*/ ".xlsx"/*, ".xlml", ".xls"*/];
var ofmt: X.BookType[] = ["xlsb", "xlsm", "xlsx", "ods", "biff2", "biff5", "biff8", "xlml", "sylk", "dif", "dbf", "eth", "fods", "csv", "txt", "html"];
var ex = fullex.slice(); ex = ex.concat([".ods", ".xls", ".xml", ".fods"]);
opts.WTF = true;
opts.cellStyles = true;
if(Deno.env.get("FMTS") === "full") Deno.env.set("FMTS", ex.join(":"));
if(Deno.env.get("FMTS")) ex=(Deno.env.get("FMTS")||"").split(":").map(function(x){return x[0]==="."?x:"."+x;});
var exp = ex.map(function(x: string){ return x + ".pending"; });
function test_file(x: string){ return ex.indexOf(x.slice(-5))>=0||exp.indexOf(x.slice(-13))>=0 || ex.indexOf(x.slice(-4))>=0||exp.indexOf(x.slice(-12))>=0; }

var files: string[] = [], fileA: string[] = [];
if(!browser) {
var _files = fs.existsSync('tests.lst') ? fs.readFileSync('tests.lst', 'utf-8').split("\n").map(function(x) { return x.trim(); }) : fs.readdirSync('test_files');
for(var _filesi = 0; _filesi < _files.length; ++_filesi) if(test_file(_files[_filesi])) files.push(_files[_filesi]);
var _fileA = fs.existsSync('tests/testA.lst') ? fs.readFileSync('tests/testA.lst', 'utf-8').split("\n").map(function(x) { return x.trim(); }) : [];
for(var _fileAi = 0; _fileAi < _fileA.length; ++_fileAi) if(test_file(_fileA[_fileAi])) fileA.push(_fileA[_fileAi]);
}

var can_write_numbers = typeof Set !== "undefined" && typeof Array.prototype.findIndex == "function" && typeof Uint8Array !== "undefined" && typeof Uint8Array.prototype.indexOf == "function";

/* Excel enforces 31 character sheet limit, although technical file limit is 255 */
function fixsheetname(x: string): string { return x.substr(0,31); }

function stripbom(x: string): string { return x.replace(/^\ufeff/,""); }
function fixcsv(x: string): string { return stripbom(x).replace(/\t/g,",").replace(/#{255}/g,"").replace(/"/g,"").replace(/[\n\r]+/g,"\n").replace(/\n*$/,""); }
function fixjson(x: string): string { return x.replace(/[\r\n]+$/,""); }

var dir = "./test_files/";

var dirwp = dir + "artifacts/wps/", dirqp = dir + "artifacts/quattro/";
|
||
var paths: any = {
|
||
aadbf: dirwp + 'write.dbf',
|
||
aadif: dirwp + 'write.dif',
|
||
aaxls: dirwp + 'write.xls',
|
||
aaxlsx: dirwp + 'write.xlsx',
|
||
aaxml: dirwp + 'write.xml',
|
||
|
||
abcsv: dirqp + 'write_.csv',
|
||
abdif: dirqp + 'write_.dif',
|
||
abslk: dirqp + 'write_.slk',
|
||
abx57: dirqp + 'write_57.xls',
|
||
abwb2: dirqp + 'write_6.wb2',
|
||
abwb2b: dirqp + 'write_6b.wb2',
|
||
abwb3: dirqp + 'write_8.wb3',
|
||
abqpw: dirqp + 'write_9.qpw',
|
||
abx97: dirqp + 'write_97.xls',
|
||
abwks: dirqp + 'write_L1.wks',
|
||
abwk1: dirqp + 'write_L2.wk1',
|
||
abwk3: dirqp + 'write_L3.wk3',
|
||
abwk4: dirqp + 'write_L45.wk4',
|
||
ab123: dirqp + 'write_L9.123',
|
||
ab124: dirqp + 'write_L97.123',
|
||
abwke2: dirqp + 'write_Led.wke',
|
||
abwq1: dirqp + 'write_qpdos.wq1',
|
||
abwb1: dirqp + 'write_qpw.wb1',
|
||
|
||
afxls: dir + 'AutoFilter.xls',
|
||
afxml: dir + 'AutoFilter.xml',
|
||
afods: dir + 'AutoFilter.ods',
|
||
afxlsx: dir + 'AutoFilter.xlsx',
|
||
afxlsb: dir + 'AutoFilter.xlsb',
|
||
|
||
asxls: dir + 'author_snowman.xls',
|
||
asxls5: dir + 'author_snowman.xls5',
|
||
asxml: dir + 'author_snowman.xml',
|
||
asods: dir + 'author_snowman.ods',
|
||
asxlsx: dir + 'author_snowman.xlsx',
|
||
asxlsb: dir + 'author_snowman.xlsb',
|
||
|
||
cpxls: dir + 'custom_properties.xls',
|
||
cpxml: dir + 'custom_properties.xls.xml',
|
||
cpxlsx: dir + 'custom_properties.xlsx',
|
||
cpxlsb: dir + 'custom_properties.xlsb',
|
||
|
||
cssxls: dir + 'cell_style_simple.xls',
|
||
cssxml: dir + 'cell_style_simple.xml',
|
||
cssxlsx: dir + 'cell_style_simple.xlsx',
|
||
cssxlsb: dir + 'cell_style_simple.xlsb',
|
||
|
||
cstxls: dir + 'comments_stress_test.xls',
|
||
cstxml: dir + 'comments_stress_test.xls.xml',
|
||
cstxlsx: dir + 'comments_stress_test.xlsx',
|
||
cstxlsb: dir + 'comments_stress_test.xlsb',
|
||
cstods: dir + 'comments_stress_test.ods',
|
||
|
||
cwxls: dir + 'column_width.xls',
|
||
cwxls5: dir + 'column_width.biff5',
|
||
cwxml: dir + 'column_width.xml',
|
||
cwxlsx: dir + 'column_width.xlsx',
|
||
cwxlsb: dir + 'column_width.xlsb',
|
||
cwslk: dir + 'column_width.slk',
|
||
|
||
dnsxls: dir + 'defined_names_simple.xls',
|
||
dnsxml: dir + 'defined_names_simple.xml',
|
||
dnsxlsx: dir + 'defined_names_simple.xlsx',
|
||
dnsxlsb: dir + 'defined_names_simple.xlsb',
|
||
dnsslk: dir + 'defined_names_simple.slk',
|
||
|
||
dnuxls: dir + 'defined_names_unicode.xls',
|
||
dnuxml: dir + 'defined_names_unicode.xml',
|
||
dnuods: dir + 'defined_names_unicode.ods',
|
||
dnuxlsx: dir + 'defined_names_unicode.xlsx',
|
||
dnuxlsb: dir + 'defined_names_unicode.xlsb',
|
||
|
||
dtxls: dir + 'xlsx-stream-d-date-cell.xls',
|
||
dtxml: dir + 'xlsx-stream-d-date-cell.xls.xml',
|
||
dtxlsx: dir + 'xlsx-stream-d-date-cell.xlsx',
|
||
dtxlsb: dir + 'xlsx-stream-d-date-cell.xlsb',
|
||
|
||
dtfxlsx: dir + 'DataTypesFormats.xlsx',
|
||
|
||
fstxls: dir + 'formula_stress_test.xls',
|
||
fstxml: dir + 'formula_stress_test.xls.xml',
|
||
fstxlsx: dir + 'formula_stress_test.xlsx',
|
||
fstxlsb: dir + 'formula_stress_test.xlsb',
|
||
fstods: dir + 'formula_stress_test.ods',
|
||
|
||
hlxls: dir + 'hyperlink_stress_test_2011.xls',
|
||
hlxml: dir + 'hyperlink_stress_test_2011.xml',
|
||
hlxlsx: dir + 'hyperlink_stress_test_2011.xlsx',
|
||
hlxlsb: dir + 'hyperlink_stress_test_2011.xlsb',
|
||
|
||
ilxls: dir + 'internal_link.xls',
|
||
ilxls5: dir + 'internal_link.biff5',
|
||
ilxml: dir + 'internal_link.xml',
|
||
ilxlsx: dir + 'internal_link.xlsx',
|
||
ilxlsb: dir + 'internal_link.xlsb',
|
||
ilods: dir + 'internal_link.ods',
|
||
|
||
lonxls: dir + 'LONumbers.xls',
|
||
lonxlsx: dir + 'LONumbers.xlsx',
|
||
|
||
m19xlsx: dir + 'metadata_2019.xlsx',
|
||
m19xlsb: dir + 'metadata_2019.xlsb',
|
||
|
||
mcxls: dir + 'merge_cells.xls',
|
||
mcxml: dir + 'merge_cells.xls.xml',
|
||
mcxlsx: dir + 'merge_cells.xlsx',
|
||
mcxlsb: dir + 'merge_cells.xlsb',
|
||
mcods: dir + 'merge_cells.ods',
|
||
|
||
nfxls: dir + 'number_format.xls',
|
||
nfxml: dir + 'number_format.xls.xml',
|
||
nfxlsx: dir + 'number_format.xlsm',
|
||
nfxlsb: dir + 'number_format.xlsb',
|
||
|
||
olxls: dir + 'outline.xls',
|
||
olxls5: dir + 'outline.biff5',
|
||
olxlsx: dir + 'outline.xlsx',
|
||
olxlsb: dir + 'outline.xlsb',
|
||
olods: dir + 'outline.ods',
|
||
|
||
pmxls: dir + 'page_margins_2016.xls',
|
||
pmxls5: dir + 'page_margins_2016_5.xls',
|
||
pmxml: dir + 'page_margins_2016.xml',
|
||
pmxlsx: dir + 'page_margins_2016.xlsx',
|
||
pmxlsb: dir + 'page_margins_2016.xlsb',
|
||
|
||
rhxls: dir + 'row_height.xls',
|
||
rhxls5: dir + 'row_height.biff5',
|
||
rhxml: dir + 'row_height.xml',
|
||
rhxlsx: dir + 'row_height.xlsx',
|
||
rhxlsb: dir + 'row_height.xlsb',
|
||
rhslk: dir + 'row_height.slk',
|
||
|
||
svxls: dir + 'sheet_visibility.xls',
|
||
svxls5: dir + 'sheet_visibility5.xls',
|
||
svxml: dir + 'sheet_visibility.xml',
|
||
svxlsx: dir + 'sheet_visibility.xlsx',
|
||
svxlsb: dir + 'sheet_visibility.xlsb',
|
||
svods: dir + 'sheet_visibility.ods',
|
||
|
||
swcxls: dir + 'apachepoi_SimpleWithComments.xls',
|
||
swcxml: dir + '2011/apachepoi_SimpleWithComments.xls.xml',
|
||
swcxlsx: dir + 'apachepoi_SimpleWithComments.xlsx',
|
||
swcxlsb: dir + '2013/apachepoi_SimpleWithComments.xlsx.xlsb'
|
||
};
|
||
|
||
function pathit(p: string, ext: string[]) { return ext.map(function(n) { return paths[p + n]; }); }
|
||
var FSTPaths = pathit("fst", ["xlsx", "xlsb", "xls", "xml", "ods"]);
|
||
var CSTPaths = pathit("cst", ["xlsx", "xlsb", "xls", "xml", "ods"]);
|
||
var MCPaths = pathit("mc", ["xlsx", "xlsb", "xls", "xml", "ods"]);
|
||
var CSSPaths = pathit("css", ["xlsx", "xlsb", "xls", "xml"]);
|
||
var NFPaths = pathit("nf", ["xlsx", "xlsb", "xls", "xml"]);
|
||
var DTPaths = pathit("dt", ["xlsx", "xlsb", "xls", "xml"]);
|
||
var HLPaths = pathit("hl", ["xlsx", "xlsb", "xls", "xml"]);
|
||
var ILPaths = pathit("il", ["xlsx", "xlsb", "xls", "xml", "ods", "xls5"]);
|
||
var OLPaths = pathit("ol", ["xlsx", "xlsb", "xls", "ods", "xls5"]);
|
||
var PMPaths = pathit("pm", ["xlsx", "xlsb", "xls", "xml", "xls5"]);
|
||
var SVPaths = pathit("sv", ["xlsx", "xlsb", "xls", "xml", "xls5"]);
|
||
var CWPaths = pathit("cw", ["xlsx", "xlsb", "xls", "xml", "xls5", "slk"]);
|
||
var RHPaths = pathit("rh", ["xlsx", "xlsb", "xls", "xml", "xls5", "slk"]);
|
||
|
||
var artifax = [
|
||
"cstods", "cstxls", "cstxlsb", "cstxlsb", "cstxml", "aadbf", "aadif",
|
||
"aaxls", "aaxml", "aaxlsx", "ab123", "ab124", "abcsv", "abdif", "abqpw",
|
||
"abslk", "abwb1", "abwb2", "abwb3", "abwk1", "abwk3", "abwk4", "abwks",
|
||
"abwq1", "abx57", "abx97", "abwke2", "abwb2b"
|
||
].map(function(x) { return paths[x]; });
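/* parsetest: given a parsed workbook, verify sheet names and exercise the CSV / JSON / formulae converters, comparing against the stored 2011/2013/2016 reference outputs when available */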
async function parsetest(x: string, wb: X.WorkBook, full: boolean, ext: string, t: Deno.TestContext) {
|
||
ext = (ext ? " [" + ext + "]": "");
|
||
if(!full && ext) return;
|
||
await t.step(x + ext + ' should have all bits', async function(t) {
|
||
var sname = dir + '2016/' + x.substr(x.lastIndexOf('/')+1) + '.sheetnames';
|
||
if(!fs.existsSync(sname)) sname = dir + '2011/' + x.substr(x.lastIndexOf('/')+1) + '.sheetnames';
|
||
if(!fs.existsSync(sname)) sname = dir + '2013/' + x.substr(x.lastIndexOf('/')+1) + '.sheetnames';
|
||
await t.step('should have all sheets', async function(t) {
|
||
wb.SheetNames.forEach(function(y) { assert.assert(wb.Sheets[y], 'bad sheet ' + y); });
|
||
});
|
||
if(fs.existsSync(sname)) await t.step('should have the right sheet names', async function(t) {
|
||
var file = fs.readFileSync(sname, 'utf-8').replace(/\r/g,"");
|
||
var names = wb.SheetNames.map(fixsheetname).join("\n") + "\n";
|
||
if(file.length && !x.match(/artifacts/)) assert.equal(names, file);
|
||
});
|
||
});
|
||
await t.step(x + ext + ' should generate CSV', async function(t) {
|
||
for(var i = 0; i < wb.SheetNames.length; ++i) { var ws = wb.SheetNames[i];
|
||
await t.step('#' + i + ' (' + ws + ')', async function(t) {
|
||
X.utils.sheet_to_csv(wb.Sheets[ws]);
|
||
});
|
||
}
|
||
});
|
||
await t.step(x + ext + ' should generate JSON', async function(t) {
|
||
for(var i = 0; i < wb.SheetNames.length; ++i) { var ws = wb.SheetNames[i];
|
||
await t.step('#' + i + ' (' + ws + ')', async function(t) {
|
||
X.utils.sheet_to_json(wb.Sheets[ws]);
|
||
});
|
||
}
|
||
});
|
||
await t.step(x + ext + ' should generate formulae', async function(t) {
|
||
for(var i = 0; i < wb.SheetNames.length; ++i) { var ws = wb.SheetNames[i];
|
||
await t.step('#' + i + ' (' + ws + ')', async function(t) {
|
||
X.utils.sheet_to_formulae(wb.Sheets[ws]);
|
||
});
|
||
}
|
||
});
|
||
if(!full) return;
|
||
var getfile = function(dir: string, x: string, i: number, type: string) {
|
||
var name = (dir + x + '.' + i + type);
|
||
var root = "";
|
||
if(x.substr(-5) === ".xlsb") {
|
||
root = x.slice(0,-5);
|
||
if(!fs.existsSync(name)) name=(dir + root + '.xlsx.' + i + type);
|
||
if(!fs.existsSync(name)) name=(dir + root + '.xlsm.' + i + type);
|
||
if(!fs.existsSync(name)) name=(dir + root + '.xls.' + i + type);
|
||
}
|
||
if(x.substr(-4) === ".xls") {
|
||
root = x.slice(0,-4);
|
||
if(!fs.existsSync(name)) name=(dir + root + '.xlsx.' + i + type);
|
||
if(!fs.existsSync(name)) name=(dir + root + '.xlsm.' + i + type);
|
||
if(!fs.existsSync(name)) name=(dir + root + '.xlsb.' + i + type);
|
||
}
|
||
return name;
|
||
};
|
||
await t.step(x + ext + ' should generate correct CSV output', async function(t) {
|
||
for(var i = 0; i < wb.SheetNames.length; ++i) { var ws = wb.SheetNames[i];
|
||
var name = getfile(dir, x, i, ".csv");
|
||
if(fs.existsSync(name)) await t.step('#' + i + ' (' + ws + ')', async function(t) {
|
||
var file = fs.readFileSync(name, 'utf-8');
|
||
var csv = X.utils.sheet_to_csv(wb.Sheets[ws]);
|
||
assert.equal(fixcsv(csv), fixcsv(file), "CSV badness");
|
||
});
|
||
}
|
||
});
|
||
if(typeof JSON !== 'undefined') await t.step(x + ext + ' should generate correct JSON output', async function(t) {
|
||
for(var i = 0; i < wb.SheetNames.length; ++i) { var ws = wb.SheetNames[i];
|
||
var rawjson = getfile(dir, x, i, ".rawjson");
|
||
if(fs.existsSync(rawjson)) await t.step('#' + i + ' (' + ws + ')', async function(t) {
|
||
var file = fs.readFileSync(rawjson, 'utf-8');
|
||
var json: Array<any> = X.utils.sheet_to_json(wb.Sheets[ws],{raw:true});
|
||
assert.equal(JSON.stringify(json), fixjson(file), "JSON badness");
|
||
});
|
||
|
||
var jsonf = getfile(dir, x, i, ".json");
|
||
if(fs.existsSync(jsonf)) await t.step('#' + i + ' (' + ws + ')', async function(t) {
|
||
var file = fs.readFileSync(jsonf, 'utf-8');
|
||
var json: Array<any> = X.utils.sheet_to_json(wb.Sheets[ws], {raw:false});
|
||
assert.equal(JSON.stringify(json), fixjson(file), "JSON badness");
|
||
});
|
||
}
|
||
});
|
||
if(fs.existsSync(dir + '2011/' + x + '.xml'))
|
||
await t.step(x + ext + '.xml from 2011', async function(t) {
|
||
await t.step('should parse', async function(t) {
|
||
/*var wb = */X.readFile(dir + '2011/' + x + '.xml', opts);
|
||
});
|
||
});
|
||
if(fs.existsSync(dir + '2013/' + x + '.xlsb'))
|
||
await t.step(x + ext + '.xlsb from 2013', async function(t) {
|
||
await t.step('should parse', async function(t) {
|
||
/*var wb = */X.readFile(dir + '2013/' + x + '.xlsb', opts);
|
||
});
|
||
});
|
||
if(fs.existsSync(dir + x + '.xml' + ext))
|
||
await t.step(x + '.xml', async function(t) {
|
||
await t.step('should parse', async function(t) {
|
||
/*var wb = */X.readFile(dir + x + '.xml', opts);
|
||
});
|
||
});
|
||
}
|
||
|
||
Deno.test('should parse test files', async function(t) {
|
||
var idx = 0, x = "";
|
||
for(idx = 0; idx < files.length; ++idx) { var x = files[idx];
|
||
if(x.slice(-8) == ".pending" || !fs.existsSync(dir + x)) continue;
|
||
await t.step(x, async function(t) {
|
||
var _wb = X.readFile(dir + x, opts);
|
||
await parsetest(x, _wb, true, "", t);
|
||
for(var fidx = 0; fidx < fullex.length; ++fidx) { var ext = fullex[fidx];
|
||
await t.step(x + ' [' + ext + ']', async function(t){
|
||
var wb = _wb;
|
||
wb = X.read(X.write(wb, {type:"buffer", bookType:(ext.replace(/\./,"") as X.BookType)}), {WTF:opts.WTF, cellNF: true});
|
||
await parsetest(x, wb, ext.replace(/\./,"") !== "xlsb", ext, t);
|
||
});
|
||
}
|
||
});
|
||
}
|
||
for(idx = 0; idx < fileA.length; ++idx) { var x = fileA[idx];
|
||
if(x.slice(-8) == ".pending" || !fs.existsSync(dir + x)) continue;
|
||
await t.step(x, async function(t) {
|
||
var wb = X.readFile(dir + x, {WTF:opts.WTF, sheetRows:10});
|
||
await parsetest(x, wb, false, "", t);
|
||
});
|
||
}
|
||
});
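/* Worksheet helpers: get_cell / each_cell / each_sheet work with both object-keyed and dense-array worksheets */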
function get_cell(ws: X.WorkSheet, addr: string) {
if(!Array.isArray(ws)) return ws[addr];
var a = X.utils.decode_cell(addr);
return (ws[a.r]||[])[a.c];
}

function each_cell(ws: X.WorkSheet, f: (c: X.CellObject) => any) {
if(Array.isArray(ws)) ws.forEach(function(row) { if(row) row.forEach(f); });
else Object.keys(ws).forEach(function(addr) { if(addr[0] === "!" || !ws.hasOwnProperty(addr)) return; f(ws[addr]); });
}

function each_sheet(wb: X.WorkBook, f: (ws: X.WorkSheet, i: number)=>any) { wb.SheetNames.forEach(function(n, i) { f(wb.Sheets[n], i); }); }

/* comments_stress_test family */
|
||
function check_comments(wb: X.WorkBook) {
|
||
var ws0 = wb.Sheets["Sheet2"];
|
||
assert.equal(get_cell(ws0,"A1").c[0].a, 'Author');
|
||
assert.equal(get_cell(ws0,"A1").c[0].t, 'Author:\nGod thinks this is good');
|
||
assert.equal(get_cell(ws0,"C1").c[0].a, 'Author');
|
||
assert.equal(get_cell(ws0,"C1").c[0].t, 'I really hope that xlsx decides not to use magic like rPr');
|
||
|
||
var ws3 = wb.Sheets["Sheet4"];
|
||
assert.equal(get_cell(ws3,"B1").c[0].a, 'Author');
|
||
assert.equal(get_cell(ws3,"B1").c[0].t, 'The next comment is empty');
|
||
assert.equal(get_cell(ws3,"B2").c[0].a, 'Author');
|
||
assert.equal(get_cell(ws3,"B2").c[0].t, '');
|
||
}
|
||
|
||
Deno.test('parse options', async function(t) {
|
||
var html_cell_types = ['s'];
|
||
await t.step('cell', async function(t) {
|
||
await t.step('XLSX should generate HTML by default', async function(t) {
|
||
var wb = X.read(fs.readFileSync(paths.cstxlsx), {type:TYPE});
|
||
var ws = wb.Sheets["Sheet1"];
|
||
each_cell(ws, function(cell) {
|
||
assert.assert(html_cell_types.indexOf(cell.t) === -1 || cell.h);
|
||
});
|
||
});
|
||
await t.step('XLSX should not generate HTML when requested', async function(t) {
|
||
var wb = X.read(fs.readFileSync(paths.cstxlsx), {type:TYPE, cellHTML:false});
|
||
var ws = wb.Sheets["Sheet1"];
|
||
each_cell(ws, function(cell) {
|
||
assert.assert(typeof cell.h === 'undefined');
|
||
});
|
||
});
|
||
await t.step('should generate formulae by default', async function(t) {
|
||
FSTPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE});
|
||
var found = false;
|
||
wb.SheetNames.forEach(function(s) {
|
||
each_cell(wb.Sheets[s], function(cell) {
|
||
if(typeof cell.f !== 'undefined') return (found = true);
|
||
});
|
||
});
|
||
assert.assert(found);
|
||
});
|
||
});
|
||
await t.step('should not generate formulae when requested', async function(t) {
|
||
FSTPaths.forEach(function(p) {
|
||
var wb =X.read(fs.readFileSync(p),{type:TYPE,cellFormula:false});
|
||
wb.SheetNames.forEach(function(s) {
|
||
each_cell(wb.Sheets[s], function(cell) {
|
||
assert.assert(typeof cell.f === 'undefined');
|
||
});
|
||
});
|
||
});
|
||
});
|
||
await t.step('should generate formatted text by default', async function(t) {
|
||
FSTPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p),{type:TYPE});
|
||
var found = false;
|
||
wb.SheetNames.forEach(function(s) {
|
||
var ws = wb.Sheets[s];
|
||
each_cell(ws, function(cell) {
|
||
if(typeof cell.w !== 'undefined') return (found = true);
|
||
});
|
||
});
|
||
assert.assert(found);
|
||
});
|
||
});
|
||
await t.step('should not generate formatted text when requested', async function(t) {
|
||
FSTPaths.forEach(function(p) {
|
||
var wb =X.read(fs.readFileSync(p),{type:TYPE, cellText:false});
|
||
wb.SheetNames.forEach(function(s) {
|
||
var ws = wb.Sheets[s];
|
||
each_cell(ws, function(cell) {
|
||
assert.assert(typeof cell.w === 'undefined');
|
||
});
|
||
});
|
||
});
|
||
});
|
||
await t.step('should not generate number formats by default', async function(t) {
|
||
NFPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE});
|
||
wb.SheetNames.forEach(function(s) {
|
||
var ws = wb.Sheets[s];
|
||
each_cell(ws, function(cell) {
|
||
assert.assert(typeof cell.z === 'undefined');
|
||
});
|
||
});
|
||
});
|
||
});
|
||
await t.step('should generate number formats when requested', async function(t) {
|
||
NFPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE, cellNF: true});
|
||
wb.SheetNames.forEach(function(s) {
|
||
var ws = wb.Sheets[s];
|
||
each_cell(ws, function(cell) {
|
||
assert.assert(cell.t!== 'n' || typeof cell.z !== 'undefined');
|
||
});
|
||
});
|
||
});
|
||
});
|
||
await t.step('should not generate cell styles by default', async function(t) {
|
||
CSSPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE, WTF:true});
|
||
wb.SheetNames.forEach(function(s) {
|
||
var ws = wb.Sheets[s];
|
||
each_cell(ws, function(cell) {
|
||
assert.assert(typeof cell.s === 'undefined');
|
||
});
|
||
});
|
||
});
|
||
});
|
||
await t.step('should generate cell styles when requested', async function(t) {
|
||
/* TODO: XLS / XLML */
|
||
[paths.cssxlsx /*, paths.cssxlsb, paths.cssxls, paths.cssxml*/].forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE, cellStyles:true});
|
||
var found = false;
|
||
each_sheet(wb, function(ws/*::, i*/) { /*:: void i; */each_cell(ws, function(cell) {
|
||
if(typeof cell.s !== 'undefined') return (found = true);
|
||
}); });
|
||
assert.assert(found);
|
||
});
|
||
});
|
||
await t.step('should not generate cell dates by default', async function(t) {
|
||
DTPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE});
|
||
each_sheet(wb, function(ws/*::, i*/) { /*:: void i; */each_cell(ws, function(cell) {
|
||
assert.assert(cell.t !== 'd');
|
||
}); });
|
||
});
|
||
});
|
||
await t.step('should generate cell dates when requested', async function(t) {
|
||
DTPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE, cellDates: true, WTF:true});
|
||
var found = false;
|
||
each_sheet(wb, function(ws/*::, i*/) { /*:: void i; */each_cell(ws, function(cell) {
|
||
if(cell.t === 'd') return (found = true);
|
||
}); });
|
||
assert.assert(found);
|
||
});
|
||
});
|
||
await t.step('should preserve `_xlfn.` only when requested', async function(t) {
|
||
var wb = { SheetNames: ["Sheet1"], Sheets: { Sheet1: {
|
||
"!ref": "A1:A1",
|
||
"A1": { t:"n", v:2, f:"_xlfn.IFS(2>3,1,3>2,2)"}
|
||
} } };
|
||
var str = X.write(wb, {bookType: "xlsx", type: "binary"});
|
||
var wb2 = X.read(str, {type: "binary"});
|
||
/*jshint -W069 */
|
||
assert.equal(wb2.Sheets["Sheet1"]["A1"].f, "IFS(2>3,1,3>2,2)");
|
||
var wb3 = X.read(str, {type: "binary", xlfn: true});
|
||
assert.equal(wb3.Sheets["Sheet1"]["A1"].f, "_xlfn.IFS(2>3,1,3>2,2)");
|
||
/*jshint +W069 */
|
||
});
|
||
});
|
||
await t.step('sheet', async function(t) {
|
||
await t.step('should not generate sheet stubs by default', async function(t) {
|
||
MCPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE});
|
||
assert.throws(function() { return get_cell(wb.Sheets["Merge"], "A2").v; });
|
||
});
|
||
});
|
||
await t.step('should generate sheet stubs when requested', async function(t) {
|
||
MCPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE, sheetStubs:true});
|
||
assert.assert(get_cell(wb.Sheets["Merge"], "A2").t == 'z');
|
||
});
|
||
});
|
||
await t.step('should handle stub cells', async function(t) {
|
||
MCPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE, sheetStubs:true});
|
||
X.utils.sheet_to_csv(wb.Sheets["Merge"]);
|
||
X.utils.sheet_to_json(wb.Sheets["Merge"]);
|
||
X.utils.sheet_to_formulae(wb.Sheets["Merge"]);
|
||
ofmt.forEach(function(f) { if(f != "dbf") X.write(wb, {type:TYPE, bookType:f}); });
|
||
});
|
||
});
|
||
function checkcells(wb: X.WorkBook, A46: boolean, B26: boolean, C16: boolean, D2: boolean) {
|
||
([ ["A46", A46], ["B26", B26], ["C16", C16], ["D2", D2] ] as Array<[string, boolean]>).forEach(function(r: [string, boolean]) {
|
||
assert.assert((typeof get_cell(wb.Sheets["Text"], r[0]) !== 'undefined') == r[1]);
|
||
});
|
||
}
|
||
await t.step('should read all cells by default', async function(t) { FSTPaths.forEach(function(p) {
|
||
checkcells(X.read(fs.readFileSync(p), {type:TYPE}), true, true, true, true);
|
||
}); });
|
||
await t.step('sheetRows n=30', async function(t) { FSTPaths.forEach(function(p) {
|
||
checkcells(X.read(fs.readFileSync(p), {type:TYPE, sheetRows:30}), false, true, true, true);
|
||
}); });
|
||
await t.step('sheetRows n=20', async function(t) { FSTPaths.forEach(function(p) {
|
||
checkcells(X.read(fs.readFileSync(p), {type:TYPE, sheetRows:20}), false, false, true, true);
|
||
}); });
|
||
await t.step('sheetRows n=10', async function(t) { FSTPaths.forEach(function(p) {
|
||
checkcells(X.read(fs.readFileSync(p), {type:TYPE, sheetRows:10}), false, false, false, true);
|
||
}); });
|
||
await t.step('sheetRows n=1', async function(t) { ofmt.forEach(function(fmt) {
|
||
([TYPE, "base64", "binary", "array"] as Array<'binary' | 'base64' | 'binary' | 'array'>).forEach(function(ot) {
|
||
var data = [[1,2],[3,4],[5,6]];
|
||
var ws = X.utils.aoa_to_sheet(data);
|
||
assert.assert(ws['!ref'] === "A1:B3");
|
||
var wb = X.utils.book_new();
|
||
X.utils.book_append_sheet(wb, ws, "Sheet1");
|
||
var bs = X.write(wb, { bookType: fmt, type: ot, WTF:true });
|
||
|
||
var wb0 = X.read(bs, { type: ot, WTF:true });
|
||
var ws0 = wb0.Sheets["Sheet1"];
|
||
assert.equal(ws0['!ref'], "A1:B3");
|
||
assert.equal(get_cell(ws0, "A1").v, fmt == "dbf" ? "1" : 1);
|
||
assert.equal(get_cell(ws0, "B2").v, 4);
|
||
assert.equal(get_cell(ws0, "A3").v, 5);
|
||
|
||
var wb1 = X.read(bs, { type: ot, sheetRows: 1 });
|
||
var ws1 = wb1.Sheets["Sheet1"];
|
||
assert.equal(ws1['!ref'], "A1:B1");
|
||
assert.equal(get_cell(ws1, "A1").v, fmt == "dbf" ? "1" : 1);
|
||
assert.assert(!get_cell(ws1, "B2"));
|
||
assert.assert(!get_cell(ws1, "A3"));
|
||
if(ws1['!fullref']) assert.equal(ws1['!fullref'], "A1:B3");
|
||
|
||
var wb2 = X.read(bs, { type: ot, sheetRows: 2 });
|
||
var ws2 = wb2.Sheets["Sheet1"];
|
||
assert.equal(ws2['!ref'], "A1:B2");
|
||
assert.equal(get_cell(ws2, "A1").v, fmt == "dbf" ? "1" : 1);
|
||
assert.equal(get_cell(ws2, "B2").v, 4);
|
||
assert.assert(!get_cell(ws2, "A3"));
|
||
if(ws2['!fullref']) assert.equal(ws2['!fullref'], "A1:B3");
|
||
|
||
var wb3 = X.read(bs, { type: ot, sheetRows: 3 });
|
||
var ws3 = wb3.Sheets["Sheet1"];
|
||
assert.equal(ws3['!ref'], "A1:B3");
|
||
assert.equal(get_cell(ws3, "A1").v, fmt == "dbf" ? "1" : 1);
|
||
assert.equal(get_cell(ws3, "B2").v, 4);
|
||
assert.equal(get_cell(ws3, "A3").v, 5);
|
||
if(ws3['!fullref']) assert.equal(ws3['!fullref'], "A1:B3");
|
||
}); }); });
|
||
});
|
||
await t.step('book', async function(t) {
|
||
await t.step('bookSheets should not generate sheets', async function(t) {
|
||
MCPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE, bookSheets:true});
|
||
assert.assert(typeof wb.Sheets === 'undefined');
|
||
});
|
||
});
|
||
await t.step('bookProps should not generate sheets', async function(t) {
|
||
NFPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE, bookProps:true});
|
||
assert.assert(typeof wb.Sheets === 'undefined');
|
||
});
|
||
});
|
||
await t.step('bookProps && bookSheets should not generate sheets', async function(t) {
|
||
PMPaths.forEach(function(p) {
|
||
if(!fs.existsSync(p)) return;
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE, bookProps:true, bookSheets:true});
|
||
assert.assert(typeof wb.Sheets === 'undefined');
|
||
});
|
||
});
|
||
|
||
var FSTXL = [
|
||
[paths.fstxlsx, true],
|
||
[paths.fstxlsb, true],
|
||
[paths.fstxls, false]
|
||
];
|
||
await t.step('should not generate deps by default', async function(t) {
|
||
FSTPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE});
|
||
//assert.assert(typeof wb.Deps === 'undefined' || !(wb.Deps && wb.Deps.length>0));
|
||
});
|
||
});
|
||
await t.step('bookDeps should generate deps (XLSX/XLSB)', async function(t) {
|
||
FSTXL.forEach(function(p) {
|
||
if(!p[1]) return;
|
||
var wb = X.read(fs.readFileSync(p[0]), {type:TYPE, bookDeps:true});
|
||
//assert.assert(typeof wb.Deps !== 'undefined' && wb.Deps.length > 0);
|
||
});
|
||
});
|
||
|
||
var ckf = function(wb: X.WorkBook, fields: string[], exists: boolean) { fields.forEach(function(f) { assert.assert((typeof (wb as any)[f] !== 'undefined') == exists); }); };
|
||
await t.step('should not generate book files by default', async function(t) {FSTXL.forEach(function(r) {
|
||
var wb = X.read(fs.readFileSync(r[0]), {type:TYPE});
|
||
ckf(wb, r[1] ? ['files', 'keys'] : ['cfb'], false);
|
||
}); });
|
||
await t.step('bookFiles should generate book files', async function(t) {FSTXL.forEach(function(r) {
|
||
var wb = X.read(fs.readFileSync(r[0]), {type:TYPE, bookFiles:true});
|
||
ckf(wb, r[1] ? ['files', 'keys'] : ['cfb'], true);
|
||
}); });
|
||
|
||
var NFVBA = ["nfxlsx", "nfxlsb", "nfxls"].map(function(n) { return paths[n]; });
|
||
await t.step('should not generate VBA by default', async function(t) { NFPaths.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p), {type:TYPE}); assert.assert(typeof wb.vbaraw === 'undefined');
|
||
}); });
|
||
await t.step('bookVBA should generate vbaraw', async function(t) { NFVBA.forEach(function(p) {
|
||
var wb = X.read(fs.readFileSync(p),{type: TYPE, bookVBA: true});
|
||
assert.assert(wb.vbaraw);
|
||
var cfb = X.CFB.read(wb.vbaraw, {type: 'array'});
|
||
assert.assert(X.CFB.find(cfb, '/VBA/ThisWorkbook'));
|
||
}); });
|
||
});
|
||
});
|
||
|
||
Deno.test('input formats', async function(t) {
|
||
await t.step('should read binary strings', async function(t) { artifax.forEach(function(p) {
|
||
X.read(fs.readFileSync(p, 'binary'), {type: 'binary'});
|
||
}); });
|
||
await t.step('should read base64 strings', async function(t) { artifax.forEach(function(p) {
|
||
X.read(fs.readFileSync(p, 'base64'), {type: 'base64'});
|
||
}); });
|
||
await t.step('handles base64 within data URI scheme (gh-2762)', async function(t) {
|
||
var data = 'TmFtZXMNCkhhZmV6DQpTYW0NCg==';
|
||
|
||
var wb0 = X.read(data, { type: 'base64' }); // raw base64 string
|
||
var wb1 = X.read('data:;base64,' + data, { type: 'base64' }); // data URI, no media type
|
||
var wb2 = X.read('data:text/csv;base64,' + data, { type: 'base64' }); // data URI, CSV type
|
||
var wb3 = X.read('data:application/vnd.ms-excel;base64,' + data, { type: 'base64' }); // data URI, Excel
|
||
|
||
[wb0, wb1, wb2, wb3].forEach(function(wb) {
|
||
var ws = wb.Sheets.Sheet1;
|
||
assert.equal(ws["!ref"], "A1:A3");
|
||
assert.equal(get_cell(ws, "A1").v, "Names");
|
||
assert.equal(get_cell(ws, "A2").v, "Hafez");
|
||
assert.equal(get_cell(ws, "A3").v, "Sam");
|
||
});
|
||
});
|
||
if(typeof Uint8Array !== 'undefined') await t.step('should read array', async function(t) { artifax.forEach(function(p) {
|
||
X.read(fs.readFileSync(p, 'binary').split("").map(function(x) { return x.charCodeAt(0); }), {type:'array'});
|
||
}); });
|
||
await t.step('should read Buffers', async function(t) { artifax.forEach(function(p) {
|
||
X.read(fs.readFileSync(p), {type: 'buffer'});
|
||
}); });
|
||
if(typeof Uint8Array !== 'undefined') await t.step('should read ArrayBuffer / Uint8Array', async function(t) { artifax.forEach(function(p) {
|
||
var payload: any = fs.readFileSync(p, "buffer");
|
||
var ab = new ArrayBuffer(payload.length), vu = new Uint8Array(ab);
|
||
for(var i = 0; i < payload.length; ++i) vu[i] = payload[i];
|
||
X.read(ab, {type: 'array'});
|
||
X.read(vu, {type: 'array'});
|
||
}); });
|
||
await t.step('should throw if format is unknown', async function(t) { artifax.forEach(function(p) {
|
||
assert.throws(function() { X.read(fs.readFileSync(p), ({type: 'dafuq'} as any)); });
|
||
}); });
|
||
|
||
var T = browser ? 'base64' : 'buffer';
|
||
await t.step('should default to "' + T + '" type', async function(t) { artifax.forEach(function(p) {
|
||
X.read(fs.readFileSync(p));
|
||
}); });
|
||
if(!browser) await t.step('should read files', async function(t) { artifax.forEach(function(p) { X.readFile(p); }); });
|
||
});
|
||
|
||
Deno.test('output formats', async function(t) {
|
||
var fmts = [
|
||
/* fmt unicode str */
|
||
["xlsx", true, false],
|
||
["xlsb", true, false],
|
||
["xls", true, false],
|
||
["xlml", true, true],
|
||
["ods", true, false],
|
||
["fods", true, true],
|
||
["csv", true, true],
|
||
["txt", true, true],
|
||
["rtf", false, true],
|
||
["sylk", false, true],
|
||
["eth", false, true],
|
||
["html", true, true],
|
||
["dif", false, true],
|
||
["dbf", false, false],
|
||
["prn", false, true]
|
||
];
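/* RT: round-trip a small workbook through every output format for the given write/read type, then verify cell values (and Unicode content, where the format supports it) survive */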
function RT(T: 'string' | 'binary' | 'base64' | 'array' | 'buffer') {
|
||
fmts.forEach(function(fmt) {
|
||
var wb = X.utils.book_new();
|
||
X.utils.book_append_sheet(wb, X.utils.aoa_to_sheet([['R',"\u2603"],["\u0BEE",2]]), "Sheet1");
|
||
if(T == 'string' && !fmt[2]) return assert.throws(function() {X.write(wb, {type: T, bookType:(fmt[0] as X.BookType), WTF:true});});
|
||
var out = X.write(wb, {type: T, bookType:(fmt[0] as X.BookType), WTF:true});
|
||
var nwb = X.read(out, {type: T, PRN: fmt[0] == 'prn', WTF:true});
|
||
var nws = nwb.Sheets[nwb.SheetNames[0]];
|
||
assert.equal(get_cell(nws, "B2").v, 2);
|
||
assert.equal(get_cell(nws, "A1").v, "R");
|
||
if(fmt[1]) assert.equal(get_cell(nws, "A2").v, "\u0BEE");
|
||
if(fmt[1]) assert.equal(get_cell(nws, "B1").v, "\u2603");
|
||
});
|
||
}
|
||
await t.step('should write binary strings', async function(t) { RT('binary'); });
|
||
await t.step('should write base64 strings', async function(t) { RT('base64'); });
|
||
await t.step('should write JS strings', async function(t) { RT('string'); });
|
||
if(typeof ArrayBuffer !== 'undefined') await t.step('should write array buffers', async function(t) { RT('array'); });
|
||
if(!browser) await t.step('should write buffers', async function(t) { RT('buffer'); });
|
||
await t.step('should throw if format is unknown', async function(t) { assert.throws(function() { RT('dafuq' as any); }); });
|
||
});
|
||
|
||
function eqarr(a: any[],b: any[]) {
|
||
assert.equal(a.length, b.length);
|
||
a.forEach(function(x, i) { assert.equal(x, b[i]); });
|
||
}
|
||
|
||
Deno.test('API', async function(t) {
|
||
await t.step('book_append_sheet', async function(t) {
|
||
var wb = X.utils.book_new();
|
||
X.utils.book_append_sheet(wb, X.utils.aoa_to_sheet([[1,2,3],[4],[5]]), "A");
|
||
X.utils.book_append_sheet(wb, X.utils.aoa_to_sheet([[1,2,3],[4],[5]]));
|
||
X.utils.book_append_sheet(wb, X.utils.aoa_to_sheet([[1,2,3],[4],[5]]));
|
||
X.utils.book_append_sheet(wb, X.utils.aoa_to_sheet([[1,2,3],[4],[5]]), "B");
|
||
X.utils.book_append_sheet(wb, X.utils.aoa_to_sheet([[1,2,3],[4],[5]]));
|
||
eqarr(wb.SheetNames, ["A","Sheet1","Sheet2","B","Sheet3"]);
|
||
});
|
||
await t.step('sheet_add_json', async function(t) {
|
||
var ws = X.utils.json_to_sheet([{A:"S", B:"h", C:"e", D:"e", E:"t", F:"J", G:"S"}], {header:["A","B","C","D","E","F","G"], skipHeader:true});
|
||
X.utils.sheet_add_json(ws, [{A:1, B:2}, {A:2, B:3}, {A:3, B:4}], {skipHeader:true, origin:"A2"});
|
||
X.utils.sheet_add_json(ws, [{A:5, B:6, C:7}, {A:6, B:7, C:8}, {A:7, B:8, C:9}], {skipHeader:true, origin:{r:1, c:4}, header:["A","B","C"]});
|
||
X.utils.sheet_add_json(ws, [{A:4, B:5, C:6, D:7, E:8, F:9, G:0}], {header:["A","B","C","D","E","F","G"], skipHeader:true, origin:-1});
|
||
assert.equal(X.utils.sheet_to_csv(ws).trim(), "S,h,e,e,t,J,S\n1,2,,,5,6,7\n2,3,,,6,7,8\n3,4,,,7,8,9\n4,5,6,7,8,9,0");
|
||
});
|
||
await t.step('sheet_add_aoa', async function(t) {
|
||
var ws = X.utils.aoa_to_sheet([ "SheetJS".split("") ]);
|
||
X.utils.sheet_add_aoa(ws, [[1,2], [2,3], [3,4]], {origin: "A2"});
|
||
X.utils.sheet_add_aoa(ws, [[5,6,7], [6,7,8], [7,8,9]], {origin:{r:1, c:4}});
|
||
X.utils.sheet_add_aoa(ws, [[4,5,6,7,8,9,0]], {origin: -1});
|
||
assert.equal(X.utils.sheet_to_csv(ws).trim(), "S,h,e,e,t,J,S\n1,2,,,5,6,7\n2,3,,,6,7,8\n3,4,,,7,8,9\n4,5,6,7,8,9,0");
|
||
});
|
||
await t.step('sheet_add_aoa support object cell', async function(t) {
|
||
var data = X.utils.aoa_to_sheet([
|
||
['url', 'name', 'id'],
|
||
[ { l: { Target: 'https://123.com' }, v: 'url', t: 's' }, 'tom', 'xxx' ]
|
||
]);
|
||
if(assert.deepEqual) assert.deepEqual(data.A2, { l: { Target: 'https://123.com' }, v: 'url', t: 's' });
|
||
});
|
||
await t.step('decode_range', async function(t) {
|
||
var _c = "ABC", _r = "123", _C = "DEF", _R = "456";
|
||
|
||
var r = X.utils.decode_range(_c + _r + ":" + _C + _R);
|
||
assert.assert(r.s != r.e);
|
||
assert.equal(r.s.c, X.utils.decode_col(_c)); assert.equal(r.s.r, X.utils.decode_row(_r));
|
||
assert.equal(r.e.c, X.utils.decode_col(_C)); assert.equal(r.e.r, X.utils.decode_row(_R));
|
||
|
||
r = X.utils.decode_range(_c + _r);
|
||
assert.assert(r.s != r.e);
|
||
assert.equal(r.s.c, X.utils.decode_col(_c)); assert.equal(r.s.r, X.utils.decode_row(_r));
|
||
assert.equal(r.e.c, X.utils.decode_col(_c)); assert.equal(r.e.r, X.utils.decode_row(_r));
|
||
});
|
||
});
|
||
|
||
function coreprop(props: X.Properties) {
|
||
assert.equal(props.Title, 'Example with properties');
|
||
assert.equal(props.Subject, 'Test it before you code it');
|
||
assert.equal(props.Author, 'Pony Foo');
|
||
assert.equal(props.Manager, 'Despicable Drew');
|
||
assert.equal(props.Company, 'Vector Inc');
|
||
assert.equal(props.Category, 'Quirky');
|
||
assert.equal(props.Keywords, 'example humor');
|
||
assert.equal(props.Comments, 'some comments');
|
||
assert.equal(props.LastAuthor, 'Hugues');
|
||
}
|
||
function custprop(props: any) {
|
||
assert.equal(props['I am a boolean'], true);
|
||
assert.equal(props['Date completed'].toISOString(), '1967-03-09T16:30:00.000Z');
|
||
assert.equal(props.Status, 2);
|
||
assert.equal(props.Counter, -3.14);
|
||
}
|
||
|
||
function cmparr(x: any[]){ for(var i=1;i<x.length;++i) assert.deepEqual(x[0], x[i]); }
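/* deepcmp / diffsty: compare nested style properties between two cells, honoring the known exceptions listed in styexc */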
function deepcmp(x: any, y: any, k: string, m?: string, c: number = 0): void {
|
||
var s = k.indexOf(".");
|
||
m = (m||"") + "|" + (s > -1 ? k.substr(0,s) : k);
|
||
if(s < 0) return assert[c<0?'notEqual':'equal'](x[k], y[k], m);
|
||
return deepcmp(x[k.substr(0,s)],y[k.substr(0,s)],k.substr(s+1),m,c);
|
||
}
|
||
|
||
var styexc = [
|
||
'A2|H10|bgColor.rgb',
|
||
'F6|H1|patternType'
|
||
];
|
||
var stykeys = [
|
||
"patternType",
|
||
"fgColor.rgb",
|
||
"bgColor.rgb"
|
||
];
|
||
function diffsty(ws: X.WorkSheet, r1: string,r2: string) {
|
||
var c1 = get_cell(ws,r1).s, c2 = get_cell(ws,r2).s;
|
||
stykeys.forEach(function(m) {
|
||
var c = -1;
|
||
if(styexc.indexOf(r1+"|"+r2+"|"+m) > -1) c = 1;
|
||
else if(styexc.indexOf(r2+"|"+r1+"|"+m) > -1) c = 1;
|
||
deepcmp(c1,c2,m,r1+","+r2,c);
|
||
});
|
||
}
|
||
|
||
function hlink1(ws: X.WorkSheet) {[
|
||
["A1", "http://www.sheetjs.com"],
|
||
["A2", "http://oss.sheetjs.com"],
|
||
["A3", "http://oss.sheetjs.com#foo"],
|
||
["A4", "mailto:dev@sheetjs.com"],
|
||
["A5", "mailto:dev@sheetjs.com?subject=hyperlink"],
|
||
["A6", "../../sheetjs/Documents/Test.xlsx"],
|
||
["A7", "http://sheetjs.com", "foo bar baz"]
|
||
].forEach(function(r) {
|
||
assert.equal(get_cell(ws, r[0]).l.Target, r[1]);
|
||
if(r[2]) assert.equal(get_cell(ws, r[0]).l.Tooltip, r[2]);
|
||
}); }
|
||
|
||
function hlink2(ws: X.WorkSheet) { [
|
||
["A1", "#Sheet2!A1"],
|
||
["A2", "#WBScope"],
|
||
["A3", "#Sheet1!WSScope1", "#Sheet1!C7:E8"],
|
||
["A5", "#Sheet1!A5"]
|
||
].forEach(function(r) {
|
||
if(r[2] && get_cell(ws, r[0]).l.Target == r[2]) return;
|
||
assert.equal(get_cell(ws, r[0]).l.Target, r[1]);
|
||
}); }
|
||
|
||
function check_margin(margins: X.MarginInfo, exp: number[]) {
|
||
(["left", "right", "top", "bottom", "header", "footer"] as Array<keyof X.MarginInfo>).forEach(function(m: keyof X.MarginInfo, i: number) {
|
||
assert.equal(margins[m], exp[i]);
|
||
});
|
||
}
|
||
|
||
Deno.test('parse features', async function(t) {
|
||
await t.step('sheet visibility', async function(t) {
|
||
var wbs = SVPaths.map(function(p) { return X.read(fs.readFileSync(p), {type:TYPE}); });
|
||
await t.step('should detect visible sheets', async function(t) {
|
||
wbs.forEach(function(wb) {
|
||
assert.assert(!wb?.Workbook?.Sheets?.[0]?.Hidden);
|
||
});
|
||
});
|
||
await t.step('should detect all hidden sheets', async function(t) {
|
||
wbs.forEach(function(wb) {
|
||
assert.assert(wb?.Workbook?.Sheets?.[1]?.Hidden);
|
||
assert.assert(wb?.Workbook?.Sheets?.[2]?.Hidden);
|
||
});
|
||
});
|
||
await t.step('should distinguish very hidden sheets', async function(t) {
|
||
wbs.forEach(function(wb) {
|
||
assert.equal(wb?.Workbook?.Sheets?.[1]?.Hidden,1);
|
||
assert.equal(wb?.Workbook?.Sheets?.[2]?.Hidden,2);
|
||
});
|
||
});
|
||
});
|
||
|
||
await t.step('comments', async function(t) {
|
||
if(fs.existsSync(paths.swcxlsx)) await t.step('should have comment as part of cell properties', async function(t){
|
||
var sheet = 'Sheet1';
|
||
var wb1=X.read(fs.readFileSync(paths.swcxlsx), {type:TYPE});
|
||
var wb2=X.read(fs.readFileSync(paths.swcxlsb), {type:TYPE});
|
||
var wb3=X.read(fs.readFileSync(paths.swcxls), {type:TYPE});
|
||
var wb4=X.read(fs.readFileSync(paths.swcxml), {type:TYPE});
|
||
|
||
[wb1,wb2,wb3,wb4].map(function(wb: X.WorkBook) { return wb.Sheets[sheet]; }).forEach(function(ws, i) {
|
||
assert.equal(get_cell(ws, "B1").c.length, 1,"must have 1 comment");
|
||
assert.equal(get_cell(ws, "B1").c[0].a, "Yegor Kozlov","must have the same author");
|
||
assert.equal(get_cell(ws, "B1").c[0].t, "Yegor Kozlov:\nfirst cell", "must have the concatenated texts");
|
||
if(i > 0) return;
|
||
assert.equal(get_cell(ws, "B1").c[0].r, '<r><rPr><b/><sz val="8"/><color indexed="81"/><rFont val="Tahoma"/></rPr><t>Yegor Kozlov:</t></r><r><rPr><sz val="8"/><color indexed="81"/><rFont val="Tahoma"/></rPr><t xml:space="preserve">\r\nfirst cell</t></r>', "must have the rich text representation");
|
||
assert.equal(get_cell(ws, "B1").c[0].h, '<span style="font-size:8pt;"><b>Yegor Kozlov:</b></span><span style="font-size:8pt;"><br/>first cell</span>', "must have the html representation");
|
||
});
|
||
});
|
||
var stst = [
|
||
['xlsx', paths.cstxlsx],
|
||
['xlsb', paths.cstxlsb],
|
||
['xls', paths.cstxls],
|
||
['xlml', paths.cstxml],
|
||
['ods', paths.cstods]
|
||
]; for(var ststi = 0; ststi < stst.length; ++ststi) { let m = stst[ststi]; await t.step(m[0] + ' stress test', async function(t) {
|
||
var wb = X.read(fs.readFileSync(m[1]), {type:TYPE});
|
||
check_comments(wb);
|
||
var ws0 = wb.Sheets["Sheet2"];
|
||
assert.equal(get_cell(ws0,"A1").c[0].a, 'Author');
|
||
assert.equal(get_cell(ws0,"A1").c[0].t, 'Author:\nGod thinks this is good');
|
||
assert.equal(get_cell(ws0,"C1").c[0].a, 'Author');
|
||
assert.equal(get_cell(ws0,"C1").c[0].t, 'I really hope that xlsx decides not to use magic like rPr');
|
||
}); }
|
||
});
|
||
|
||
await t.step('should parse core properties and custom properties', async function(t) {
|
||
var wbs = [
|
||
X.read(fs.readFileSync(paths.cpxlsx), {type:TYPE, WTF:true}),
|
||
X.read(fs.readFileSync(paths.cpxlsb), {type:TYPE, WTF:true}),
|
||
X.read(fs.readFileSync(paths.cpxls), {type:TYPE, WTF:true}),
|
||
X.read(fs.readFileSync(paths.cpxml), {type:TYPE, WTF:true})
|
||
];
|
||
|
||
var s1 = ['XLSX', 'XLSB', 'XLS', 'XML']; for(var i = 0; i < s1.length; ++i) { let x = s1[i];
|
||
await t.step(x + ' should parse core properties', async function(t) { var P = wbs?.[i]?.Props; if(typeof P == "undefined") throw "missing props"; coreprop(P); });
|
||
await t.step(x + ' should parse custom properties', async function(t) { custprop(wbs?.[i]?.Custprops); });
|
||
}
|
||
var s2 = [
|
||
["asxls", "BIFF8", "\u2603"],
|
||
["asxls5", "BIFF5", "_"],
|
||
["asxml", "XLML", "\u2603"],
|
||
["asods", "ODS", "God"],
|
||
["asxlsx", "XLSX", "\u2603"],
|
||
["asxlsb", "XLSB", "\u2603"]
|
||
]; for(var ii = 0; ii < s2.length; ++ii) { let x = s2[ii];
|
||
await t.step(x[1] + ' should read ' + (x[2] == "\u2603" ? 'unicode ' : "") + 'author', async function(t) {
|
||
var wb = X.read(fs.readFileSync(paths[x[0]]), {type:TYPE});
|
||
assert.equal(wb?.Props?.Author, x[2]);
|
||
}); }
|
||
var BASE = "இராமா";
|
||
/* TODO: ODS, XLS */
|
||
var s3:Array<X.BookType> = [ "xlsx", "xlsb", "xlml"/*, "ods", "xls" */]; for(var i = 0; i < s3.length; ++i) { let n = s3[i];
|
||
await t.step(n + ' should round-trip unicode category', async function(t) {
|
||
var wb = X.utils.book_new();
|
||
X.utils.book_append_sheet(wb, X.utils.aoa_to_sheet([["a"]]), "Sheet1");
|
||
if(!wb.Props) wb.Props = {};
|
||
(wb.Props || (wb.Props = {})).Category = BASE;
|
||
var wb2 = X.read(X.write(wb, {bookType:n, type:TYPE}), {type:TYPE});
|
||
assert.equal(wb2?.Props?.Category,BASE);
|
||
}); }
|
||
});
|
||
|
||
await t.step('sheetRows', async function(t) {
|
||
await t.step('should use original range if not set', async function(t) {
|
||
var opts: X.ParsingOptions = {type:TYPE};
|
||
FSTPaths.map(function(p) { return X.read(fs.readFileSync(p), opts); }).forEach(function(wb) {
|
||
assert.equal(wb.Sheets["Text"]["!ref"],"A1:F49");
|
||
});
|
||
});
|
||
await t.step('should adjust range if set', async function(t) {
|
||
var opts = {type:TYPE, sheetRows:10};
|
||
var wbs = FSTPaths.map(function(p) { return X.read(fs.readFileSync(p), opts); });
|
||
/* TODO: XLS, XML, ODS */
|
||
wbs.slice(0,2).forEach(function(wb) {
|
||
assert.equal(wb.Sheets["Text"]["!fullref"],"A1:F49");
|
||
assert.equal(wb.Sheets["Text"]["!ref"],"A1:F10");
|
||
});
|
||
});
|
||
await t.step('should not generate comment cells', async function(t) {
|
||
var opts = {type:TYPE, sheetRows:10};
|
||
var wbs = CSTPaths.map(function(p) { return X.read(fs.readFileSync(p), opts); });
|
||
/* TODO: XLS, XML, ODS */
|
||
wbs.slice(0,2).forEach(function(wb) {
|
||
assert.equal(wb.Sheets["Sheet7"]["!fullref"],"A1:N34");
|
||
assert.equal(wb.Sheets["Sheet7"]["!ref"],"A1");
|
||
});
|
||
});
|
||
});
|
||
|
||
await t.step('column properties', async function(t) {
|
||
var wbs: X.WorkBook[] = [], wbs_no_slk: X.WorkBook[] = [];
|
||
wbs = CWPaths.map(function(n) { return X.read(fs.readFileSync(n), {type:TYPE, cellStyles:true}); });
|
||
wbs_no_slk = wbs.slice(0, 5);
|
||
await t.step('should have "!cols"', async function(t) {
|
||
wbs.forEach(function(wb) { assert.assert(wb.Sheets["Sheet1"]['!cols']); });
|
||
});
|
||
await t.step('should have correct widths', async function(t) {
|
||
/* SYLK rounds wch so skip non-integral */
|
||
wbs_no_slk.map(function(x) { return x.Sheets["Sheet1"]['!cols']; }).forEach(function(x) {
|
||
assert.equal(x?.[1]?.width, 0.1640625);
|
||
assert.equal(x?.[2]?.width, 16.6640625);
|
||
assert.equal(x?.[3]?.width, 1.6640625);
|
||
});
|
||
wbs.map(function(x) { return x.Sheets["Sheet1"]['!cols']; }).forEach(function(x) {
|
||
assert.equal(x?.[4]?.width, 4.83203125);
|
||
assert.equal(x?.[5]?.width, 8.83203125);
|
||
assert.equal(x?.[6]?.width, 12.83203125);
|
||
assert.equal(x?.[7]?.width, 16.83203125);
|
||
});
|
||
});
|
||
await t.step('should have correct pixels', async function(t) {
|
||
/* SYLK rounds wch so skip non-integral */
|
||
wbs_no_slk.map(function(x) { return x.Sheets["Sheet1"]['!cols']; }).forEach(function(x) {
|
||
assert.equal(x?.[1].wpx, 1);
|
||
assert.equal(x?.[2].wpx, 100);
|
||
assert.equal(x?.[3].wpx, 10);
|
||
});
|
||
wbs.map(function(x) { return x.Sheets["Sheet1"]['!cols']; }).forEach(function(x) {
|
||
assert.equal(x?.[4].wpx, 29);
|
||
assert.equal(x?.[5].wpx, 53);
|
||
assert.equal(x?.[6].wpx, 77);
|
||
assert.equal(x?.[7].wpx, 101);
|
||
});
|
||
});
|
||
});
|
||
|
||
await t.step('row properties', async function(t) {
|
||
var wbs = RHPaths.map(function(p) { return X.read(fs.readFileSync(p), {type:TYPE, cellStyles:true}); });
|
||
/* */
|
||
var ols = OLPaths.map(function(p) { return X.read(fs.readFileSync(p), {type:TYPE, cellStyles:true}); });
|
||
await t.step('should have "!rows"', async function(t) {
|
||
wbs.forEach(function(wb) { assert.assert(wb.Sheets["Sheet1"]['!rows']); });
|
||
});
|
||
await t.step('should have correct points', async function(t) {
|
||
wbs.map(function(x) { return x.Sheets["Sheet1"]['!rows']; }).forEach(function(x) {
|
||
assert.equal(x?.[1].hpt, 1);
|
||
assert.equal(x?.[2].hpt, 10);
|
||
assert.equal(x?.[3].hpt, 100);
|
||
});
|
||
});
|
||
await t.step('should have correct pixels', async function(t) {
|
||
wbs.map(function(x) { return x.Sheets["Sheet1"]['!rows']; }).forEach(function(x) {
|
||
/* note: at 96 PPI hpt == hpx */
|
||
assert.equal(x?.[1].hpx, 1);
|
||
assert.equal(x?.[2].hpx, 10);
|
||
assert.equal(x?.[3].hpx, 100);
|
||
});
|
||
});
|
||
await t.step('should have correct outline levels', async function(t) {
|
||
ols.map(function(x) { return x.Sheets["Sheet1"]; }).forEach(function(ws) {
|
||
var rows = ws['!rows'];
|
||
for(var i = 0; i < 29; ++i) {
|
||
var cell = get_cell(ws, "A" + X.utils.encode_row(i));
|
||
var lvl = (rows?.[i]||{}).level||0;
|
||
if(!cell || cell.t == 's') assert.equal(lvl, 0);
|
||
else if(cell.t == 'n') {
|
||
if(cell.v === 0) assert.equal(lvl, 0);
|
||
else assert.equal(lvl, cell.v);
|
||
}
|
||
}
|
||
assert.equal(rows?.[29]?.level, 7);
|
||
});
|
||
});
|
||
});
|
||
|
||
await t.step('merge cells',async function(t) {
|
||
var wbs = MCPaths.map(function(p) { return X.read(fs.readFileSync(p), {type:TYPE}); });
|
||
await t.step('should have !merges', async function(t) {
|
||
wbs.forEach(function(wb) {
|
||
assert.assert(wb.Sheets["Merge"]['!merges']);
|
||
});
|
||
var m = wbs.map(function(x) { return x.Sheets["Merge"]?.['!merges']?.map(function(y) { return X.utils.encode_range(y); });});
|
||
m?.slice(1)?.forEach(function(x) {
|
||
assert.deepEqual(m?.[0]?.sort(),x?.sort());
|
||
});
|
||
});
|
||
});
|
||
|
||
await t.step('should find hyperlinks', async function(t) {
|
||
var wb1 = HLPaths.map(function(p) { return X.read(fs.readFileSync(p), {type:TYPE, WTF:true}); });
|
||
var wb2 = ILPaths.map(function(p) { return X.read(fs.readFileSync(p), {type:TYPE, WTF:true}); });
|
||
|
||
var ext1 = ['xlsx', 'xlsb', 'xls', 'xml']; for(let i = 0; i < ext1.length; ++i) { let x = ext1[i];
|
||
await t.step(x + " external", async function(t) { hlink1(wb1[i].Sheets["Sheet1"]); });
|
||
}
|
||
var ext2 = ['xlsx', 'xlsb', 'xls', 'xml', 'ods']; for(let i = 0; i < ext2.length; ++i) { let x = ext2[i];
|
||
await t.step(x + " internal", async function(t) { hlink2(wb2[i].Sheets["Sheet1"]); });
|
||
}
|
||
});
|
||
|
||
await t.step('should parse cells with date type (XLSX/XLSM)', async function(t) {
|
||
await t.step('Must have read the date', async function(t) {
|
||
var wb: X.WorkBook, ws: X.WorkSheet;
|
||
var sheetName = 'Sheet1';
|
||
wb = X.read(fs.readFileSync(paths.dtxlsx), {type:TYPE});
|
||
ws = wb.Sheets[sheetName];
|
||
var sheet: Array<any> = X.utils.sheet_to_json(ws, {raw: false});
|
||
assert.equal(sheet[3]['てすと'], '2/14/14');
|
||
});
|
||
await t.step('cellDates should not affect formatted text', async function(t) {
|
||
var sheetName = 'Sheet1';
|
||
var ws1 = X.read(fs.readFileSync(paths.dtxlsx), {type:TYPE}).Sheets[sheetName];
|
||
var ws2 = X.read(fs.readFileSync(paths.dtxlsb), {type:TYPE}).Sheets[sheetName];
|
||
assert.equal(X.utils.sheet_to_csv(ws1),X.utils.sheet_to_csv(ws2));
|
||
});
|
||
});
|
||
|
||
await t.step('cellDates', async function(t) {
|
||
var fmts = [
|
||
/* desc path sheet cell formatted */
|
||
['XLSX', paths.dtxlsx, 'Sheet1', 'B5', '2/14/14'],
|
||
['XLSB', paths.dtxlsb, 'Sheet1', 'B5', '2/14/14'],
|
||
['XLS', paths.dtxls, 'Sheet1', 'B5', '2/14/14'],
|
||
['XLML', paths.dtxml, 'Sheet1', 'B5', '2/14/14'],
|
||
['XLSM', paths.nfxlsx, 'Implied', 'B13', '18-Oct-33']
|
||
];
|
||
await t.step('should not generate date cells by default', async function(t) { fmts.forEach(function(f) {
|
||
var wb, ws;
|
||
wb = X.read(fs.readFileSync(f[1]), {type:TYPE});
|
||
ws = wb.Sheets[f[2]];
|
||
assert.equal(get_cell(ws, f[3]).w, f[4]);
|
||
assert.equal(get_cell(ws, f[3]).t, 'n');
|
||
}); });
|
||
await t.step('should generate date cells if cellDates is true', async function(t) { fmts.forEach(function(f) {
|
||
var wb, ws;
|
||
wb = X.read(fs.readFileSync(f[1]), {type:TYPE, cellDates:true});
|
||
ws = wb.Sheets[f[2]];
|
||
assert.equal(get_cell(ws, f[3]).w, f[4]);
|
||
assert.equal(get_cell(ws, f[3]).t, 'd');
|
||
}); });
|
||
});
|
||
|
||
await t.step('defined names', async function(t) {var dnp = [
/* desc path cmnt */
['xlsx', paths.dnsxlsx, true],
['xlsb', paths.dnsxlsb, true],
['xls', paths.dnsxls, true],
['xlml', paths.dnsxml, false],
['slk', paths.dnsslk, false]
] as Array<[string, string, boolean]>; for(var i = 0; i < dnp.length; ++i) { let m: [string, string, boolean] = dnp[i]; await t.step(m[0], async function(t) {
var wb = X.read(fs.readFileSync(m[1]), {type:TYPE});
var names = wb?.Workbook?.Names;

if(names) {
if(m[0] != 'slk') {
for(var i = 0; i < names?.length; ++i) if(names[i].Name == "SheetJS") break;
assert.assert(i < names?.length, "Missing name");
assert.equal(names[i].Sheet, void 0);
assert.equal(names[i].Ref, "Sheet1!$A$1");
if(m[2]) assert.equal(names[i].Comment, "defined names just suck excel formulae are bad MS should feel bad");
}

for(i = 0; i < names.length; ++i) if(names[i].Name == "SHEETjs") break;
assert.assert(i < names.length, "Missing name");
assert.equal(names[i].Sheet, 0);
assert.equal(names[i].Ref, "Sheet1!$A$2");
}
}); } });

await t.step('defined names unicode', async function(t) {var dnu=[
/* desc path RT */
['xlsx', paths.dnuxlsx, true],
['xlsb', paths.dnuxlsb, true],
['ods', paths.dnuods, true],
['xls', paths.dnuxls, false],
['xlml', paths.dnuxml, false]
]; for(var i = 0; i < dnu.length; ++i) { let m = dnu[i]; await t.step(m[0], async function(t) {
var wb = X.read(fs.readFileSync(m[1]), {type:TYPE});
var wb2 = X.read(X.write(wb, {type:TYPE, bookType: m[0]}), {type:TYPE});
[
"NoContainsJapanese",
"\u65E5\u672C\u8a9e\u306e\u307f",
"sheet\u65e5\u672c\u8a9e",
"\u65e5\u672c\u8a9esheet",
"sheet\u65e5\u672c\u8a9esheet"
].forEach(function(n, i) { assert.equal(wb.SheetNames[i], n); });
[
["name\u65e5\u672c\u8a9e", "sheet\u65e5\u672c\u8a9e!$A$1"],
["name\u65e5\u672c\u8a9ename", "sheet\u65e5\u672c\u8a9esheet!$B$2"],
["NoContainsJapaneseName", "\u65e5\u672c\u8a9e\u306e\u307f!$A$1"],
["sheet\u65e5\u672c\u8a9e", "sheet\u65e5\u672c\u8a9e!$A$1"],
["\u65e5\u672c\u8a9e", "NoContainsJapanese!$A$1"],
["\u65e5\u672c\u8a9ename", "\u65e5\u672c\u8a9esheet!$I$2"]
].forEach(function(n) {(m[2] ? [ wb, wb2 ] : [ wb ]).forEach(function(wb) {
var DN = null;
var arr = wb?.Workbook?.Names;
if(arr) for(var j = 0; j < arr.length; ++j) if(arr[j]?.Name == n?.[0]) DN = arr[j];
assert.assert(DN);
// $FlowIgnore
assert.equal(DN?.Ref, n?.[1]);
}); });
}); } });

await t.step('workbook codename unicode', async function(t) {
var wb = X.utils.book_new();
var ws = X.utils.aoa_to_sheet([[1]]);
X.utils.book_append_sheet(wb, ws, "Sheet1");
wb.Workbook = { WBProps: { CodeName: "本工作簿" } };
var exts = ['xlsx', 'xlsb'] as Array<X.BookType>; for(var i = 0; i < exts.length; ++i) { var m = exts[i]; await t.step(m, async function(t) {
var bstr = X.write(wb, {type: "binary", bookType: m});
var nwb = X.read(bstr, {type: "binary"});
assert.equal(nwb?.Workbook?.WBProps?.CodeName, wb?.Workbook?.WBProps?.CodeName);
}); }
});

await t.step('auto filter', async function(t) {var af = [
['xlsx', paths.afxlsx],
['xlsb', paths.afxlsb],
['xls', paths.afxls],
['xlml', paths.afxml],
['ods', paths.afods]
]; for(var j = 0; j < af.length; ++j) { var m = af[j]; await t.step(m[0], async function(t) {
var wb = X.read(fs.readFileSync(m[1]), {type:TYPE});
assert.assert(!wb.Sheets[wb.SheetNames[0]]['!autofilter']);
for(var i = 1; i < wb.SheetNames.length; ++i) {
assert.assert(wb.Sheets[wb.SheetNames[i]]['!autofilter']);
assert.equal(wb.Sheets[wb.SheetNames[i]]?.['!autofilter']?.ref,"A1:E22");
}
}); } });

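/* Note on the next step: with the `cellHTML` read option, the parser is expected
   to populate `cell.h` with an HTML-escaped rendering of the cell text, so the
   assertions below compare against character entities rather than the raw text. */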
await t.step('HTML', async function(t) {
var ws = X.utils.aoa_to_sheet([
["a","b","c"],
["&","<",">","\n"]
]);
var wb = {SheetNames:["Sheet1"],Sheets:{Sheet1:ws}};
var m: 'xlsx' = 'xlsx'; await t.step(m, async function(t) {
var wb2 = X.read(X.write(wb, {bookType:m, type:TYPE}),{type:TYPE, cellHTML:true});
assert.equal(get_cell(wb2.Sheets["Sheet1"], "A2").h, "&amp;");
assert.equal(get_cell(wb2.Sheets["Sheet1"], "B2").h, "&lt;");
assert.equal(get_cell(wb2.Sheets["Sheet1"], "C2").h, "&gt;");
var h = get_cell(wb2.Sheets["Sheet1"], "D2").h;
assert.assert(h == "&#10;" || h == "<br/>");
});
});

await t.step('page margins', async function(t) {
var wbs = PMPaths.map(function(p) { return X.read(fs.readFileSync(p), {type:TYPE, WTF:true}); });
var pm = ([
/* Sheet Name Margins: left right top bottom head foot */
["Normal", [0.70, 0.70, 0.75, 0.75, 0.30, 0.30]],
["Wide", [1.00, 1.00, 1.00, 1.00, 0.50, 0.50]],
["Narrow", [0.25, 0.25, 0.75, 0.75, 0.30, 0.30]],
["Custom 1 Inch Centered", [1.00, 1.00, 1.00, 1.00, 0.30, 0.30]],
["1 Inch HF", [0.70, 0.70, 0.75, 0.75, 1.00, 1.00]]
] as Array<[string, number[]]>); for(var pmi = 0; pmi < pm.length; ++pmi) { let tt = pm[pmi]; await t.step('should parse ' + tt[0] + ' margin', async function(t) { wbs.forEach(function(wb) {
check_margin(wb.Sheets[""+tt[0]]?.["!margins"] as any, tt[1]);
}); }); }
});

await t.step('should correctly handle styles', async function(t) {
var wsxls=X.read(fs.readFileSync(paths.cssxls), {type:TYPE,cellStyles:true,WTF:true}).Sheets["Sheet1"];
var wsxlsx=X.read(fs.readFileSync(paths.cssxlsx), {type:TYPE,cellStyles:true,WTF:true}).Sheets["Sheet1"];
var rn = function(range: string): string[] {
var r = X.utils.decode_range(range);
var out: string[] = [];
for(var R = r.s.r; R <= r.e.r; ++R) for(var C = r.s.c; C <= r.e.c; ++C)
out.push(X.utils.encode_cell({c:C,r:R}));
return out;
};
var rn2 = function(r: string): string[] { return ([] as string[]).concat.apply(([] as string[]), r.split(",").map(rn)); };
var ranges = [
'A1:D1,F1:G1', 'A2:D2,F2:G2', /* rows */
'A3:A10', 'B3:B10', 'E1:E10', 'F6:F8', /* cols */
'H1:J4', 'H10' /* blocks */
];
/*eslint-disable */
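/* Expected fill styles, one entry per range group above and in the same order:
   each records the pattern type plus the theme index and raw RGB of the
   foreground/background fill colors that the cellStyles parser should report. */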
var exp/*:Array<any>*/ = [
{ patternType: 'darkHorizontal',
fgColor: { theme: 9, raw_rgb: 'F79646' },
bgColor: { theme: 5, raw_rgb: 'C0504D' } },
{ patternType: 'darkUp',
fgColor: { theme: 3, raw_rgb: '1F497D' },
bgColor: { theme: 7, raw_rgb: '8064A2' } },
{ patternType: 'darkGray',
fgColor: { theme: 3, raw_rgb: '1F497D' },
bgColor: { theme: 1, raw_rgb: '000000' } },
{ patternType: 'lightGray',
fgColor: { theme: 6, raw_rgb: '9BBB59' },
bgColor: { theme: 2, raw_rgb: 'EEECE1' } },
{ patternType: 'lightDown',
fgColor: { theme: 4, raw_rgb: '4F81BD' },
bgColor: { theme: 7, raw_rgb: '8064A2' } },
{ patternType: 'lightGrid',
fgColor: { theme: 6, raw_rgb: '9BBB59' },
bgColor: { theme: 9, raw_rgb: 'F79646' } },
{ patternType: 'lightGrid',
fgColor: { theme: 4, raw_rgb: '4F81BD' },
bgColor: { theme: 2, raw_rgb: 'EEECE1' } },
{ patternType: 'lightVertical',
fgColor: { theme: 3, raw_rgb: '1F497D' },
bgColor: { theme: 7, raw_rgb: '8064A2' } }
];
/*eslint-enable */
for(var rangei = 0; rangei < ranges.length; ++rangei) { let rng = ranges[rangei];
await t.step('XLS | ' + rng,async function(t){cmparr(rn2(rng).map(function(x){ return get_cell(wsxls,x).s; }));});
await t.step('XLSX | ' + rng,async function(t){cmparr(rn2(rng).map(function(x){ return get_cell(wsxlsx,x).s; }));});
}
await t.step('different styles', async function(t) {
for(var i = 0; i != ranges.length-1; ++i) {
for(var j = i+1; j != ranges.length; ++j) {
diffsty(wsxlsx, rn2(ranges[i])[0], rn2(ranges[j])[0]);
/* TODO: XLS */
//diffsty(wsxls, rn2(ranges[i])[0], rn2(ranges[j])[0]);
}
}
});
await t.step('correct styles', async function(t) {
//var stylesxls = ranges.map(function(r) { return rn2(r)[0]; }).map(function(r) { return get_cell(wsxls,r).s; });
var stylesxlsx = ranges.map(function(r) { return rn2(r)[0]; }).map(function(r) { return get_cell(wsxlsx,r).s; });
exp.forEach(function(e, i) {
[
"fgColor.theme","fgColor.raw_rgb",
"bgColor.theme","bgColor.raw_rgb",
"patternType"
].forEach(function(k) {
deepcmp(e, stylesxlsx[i], k, i + ":" + k, 0);
/* TODO: XLS */
//deepcmp(e, stylesxls[i], k, i + ":" + k, 0);
});
});
});
});

await t.step('data types formats', async function(t) {var dtf = [
['xlsx', paths.dtfxlsx],
]; for(var j = 0; j < dtf.length; ++j) { var m = dtf[j]; await t.step(m[0], async function(t) {
var wb = X.read(fs.readFileSync(m[1]), {type: TYPE, cellDates: true});
var ws = wb.Sheets[wb.SheetNames[0]];
var data = X.utils.sheet_to_json<any>(ws, { header: 1, raw: true, rawNumbers: false });
assert.assert(data[0][1] instanceof Date);
assert.assert(data[1][1] instanceof Date);
assert.equal(data[2][1], '$123.00');
assert.equal(data[3][1], '98.76%');
assert.equal(data[4][1], '456.00');
assert.equal(data[5][1], '7,890');
}); } });

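/* Background for the next step: spreadsheet formats store dates as serial
   numbers relative to one of two epochs. The default "1900" system counts
   1900-01-01 as serial 1, while the "1904" system counts 1904-01-01 as serial 0,
   so identical serial values render as later dates. The fixtures in
   test_files/1904/ hold the same serials under both settings, which is why the
   expected CSV output differs between wb0 and wb4. */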
await t.step('date system', async function(t) {[
"biff5", "ods", "slk", "xls", "xlsb", "xlsx", "xml"
].forEach(function(ext) {
// TODO: verify actual date values
var wb0 = X.read(fs.readFileSync("./test_files/1904/1900." + ext), {type: TYPE, cellNF: true});
assert.assert(!(wb0?.Workbook?.WBProps?.date1904));
assert.equal(X.utils.sheet_to_csv(wb0.Sheets[wb0.SheetNames[0]]), [
"1,1900-01-01,1900-01-01,1900-01-01",
"11,1900-01-11,1900-01-11,1900-01-11",
"111,1900-04-20,1900-04-20,1900-04-20",
"1111,1903-01-15,1903-01-15,1903-01-15",
"11111,1930-06-02,1930-06-02,1930-06-02"
].join("\n"));

var wb4 = X.read(fs.readFileSync("./test_files/1904/1904." + ext), {type: TYPE, cellNF: true});
assert.assert(wb4?.Workbook?.WBProps?.date1904);
assert.equal(X.utils.sheet_to_csv(wb4.Sheets[wb4.SheetNames[0]]), [
"1,1904-01-02,1904-01-02,1904-01-02",
"11,1904-01-12,1904-01-12,1904-01-12",
"111,1904-04-21,1904-04-21,1904-04-21",
"1111,1907-01-16,1907-01-16,1907-01-16",
"11111,1934-06-03,1934-06-03,1934-06-03"
].join("\n"));
}); });

await t.step('bookType metadata', async function(t) {
([
// TODO: keep in sync with BookType, support other formats
"xlsx"/*, "xlsm" */, "xlsb"/* xls / xla / biff# */, "xlml", "ods", "fods"/*, "csv", "txt", */, "sylk", "html", "dif", "rtf"/*, "prn", "eth"*/, "dbf", "numbers"
] as X.BookType[]).forEach(function(r: X.BookType) {
if(r == "numbers" && !can_write_numbers) return;
var ws = X.utils.aoa_to_sheet([ ["a", "b", "c"], [1, 2, 3] ]);
var wb = X.utils.book_new(); X.utils.book_append_sheet(wb, ws, "Sheet1");
var data = X.write(wb, {type: TYPE, bookType: r, WTF: true, numbers:XLSX_ZAHL });
assert.equal(X.read(data, {type: TYPE, WTF: true}).bookType, r);
}); });
});

Deno.test('write features', async function(t) {
await t.step('props', async function(t) {
await t.step('core', async function(t) {
var baseprops: X.FullProperties = {
Category: "Newspaper",
ContentStatus: "Published",
Keywords: "☃",
LastAuthor: "Perry White",
LastPrinted: "1978-12-15",
//RevNumber: 6969, // TODO: this should actually be Revision
//AppVersion: 69, // TODO: this should actually be a number
Author: "Lois Lane",
Comments: "Needs work",
Identifier: "1d",
Language: "English",
Subject: "Superman",
Title: "Man of Steel"
};
var ws = X.utils.aoa_to_sheet([["a","b","c"],[1,2,3]]);
var bt1 = (['xlml', 'xlsx', 'xlsb'] as Array<X.BookType>); for(var bti = 0; bti < bt1.length; ++bti) { let w: X.BookType = bt1[bti]; await t.step(w, async function(t) {
var wb: X.WorkBook = {
Props: ({} as X.FullProperties),
SheetNames: ["Sheet1"],
Sheets: {Sheet1: ws}
};
(Object.keys(baseprops) as Array<keyof X.FullProperties>).forEach(function(k: keyof X.FullProperties) { if(wb.Props) (wb.Props as any)[k] = baseprops[k]; });
var wb2 = X.read(X.write(wb, {bookType:w, type:TYPE}), {type:TYPE});
(Object.keys(baseprops) as Array<keyof X.FullProperties>).forEach(function(k) { assert.equal(baseprops[k], wb2?.Props?.[k]); });
var wb3 = X.read(X.write(wb2, {bookType:w, type:TYPE, Props: {Author:"SheetJS"}}), {type:TYPE});
assert.equal("SheetJS", wb3?.Props?.Author);
}); }
});
});
await t.step('HTML', async function(t) {
await t.step('should use `h` value when present', async function(t) {
var sheet = X.utils.aoa_to_sheet([["abc"]]);
get_cell(sheet, "A1").h = "<b>abc</b>";
var wb = {SheetNames:["Sheet1"], Sheets:{Sheet1:sheet}};
var str = X.write(wb, {bookType:"html", type:"binary"});
assert.assert(str.indexOf("<b>abc</b>") > 0);
});
});
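/* Context for the next step: each legacy format caps the sheet size, and the
   addresses below are the last valid cell of each grid (BIFF2/BIFF5: 256
   columns x 16384 rows, BIFF8: 256 x 65536, XLSX/XLSB: 16384 x 1048576, i.e.
   column XFD). Writing a range that stays inside the cap should succeed;
   growing it by one row or one column should throw. */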
await t.step('sheet range limits', async function(t) { var b = ([
["biff2", "IV16384"],
["biff5", "IV16384"],
["biff8", "IV65536"],
["xlsx", "XFD1048576"],
["xlsb", "XFD1048576"]
] as Array<['biff2' | 'biff5' | 'biff8' | 'xlsx' | 'xlsb', string]>); for(var j = 0; j < b.length; ++j) { var r = b[j]; await t.step(r[0], async function(t) {
var C = X.utils.decode_cell(r[1]);
var wopts: X.WritingOptions = {bookType:r[0], type:'binary', WTF:true};
var wb = { SheetNames: ["Sheet1"], Sheets: { Sheet1: ({} as X.WorkSheet) } };

wb.Sheets["Sheet1"]['!ref'] = "A1:" + X.utils.encode_cell({r:0, c:C.c});
X.write(wb, wopts);
wb.Sheets["Sheet1"]['!ref'] = "A" + X.utils.encode_row(C.r - 5) + ":" + X.utils.encode_cell({r:C.r, c:0});
X.write(wb, wopts);

wb.Sheets["Sheet1"]['!ref'] = "A1:" + X.utils.encode_cell({r:0, c:C.c+1});
assert.throws(function() { X.write(wb, wopts); });
wb.Sheets["Sheet1"]['!ref'] = "A" + X.utils.encode_row(C.r - 5) + ":" + X.utils.encode_cell({r:C.r+1, c:0});
assert.throws(function() { X.write(wb, wopts); });
}); } });
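/* The `sheet` write option used below selects which worksheet is emitted when
   the target is a single-sheet format such as CSV; an unknown name should throw. */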
await t.step('single worksheet formats', async function(t) {
var wb = X.utils.book_new();
X.utils.book_append_sheet(wb, X.utils.aoa_to_sheet([[1,2],[3,4]]), "Sheet1");
X.utils.book_append_sheet(wb, X.utils.aoa_to_sheet([[5,6],[7,8]]), "Sheet2");
assert.equal(X.write(wb, {type:"string", bookType:"csv", sheet:"Sheet1"}), "1,2\n3,4");
assert.equal(X.write(wb, {type:"string", bookType:"csv", sheet:"Sheet2"}), "5,6\n7,8");
assert.throws(function() { X.write(wb, {type:"string", bookType:"csv", sheet:"Sheet3"}); });
});
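/* Background for the next step: Excel represents a sheet's AutoFilter range as
   a hidden sheet-scoped defined name, "_xlnm._FilterDatabase". The writer is
   expected to create that name from `ws["!autofilter"]` and to update it in
   place (not duplicate it) when the filter range grows on a rewrite. */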
await t.step('should create/update autofilter defined name on write', async function(t) {([
"xlsx", "xlsb", /* "xls", */ "xlml" /*, "ods" */
] as Array<X.BookType>).forEach(function(fmt) {
var wb = X.utils.book_new();
var ws = X.utils.aoa_to_sheet([["A","B","C"],[1,2,3]]);
ws["!autofilter"] = { ref: (ws["!ref"] as string) };
X.utils.book_append_sheet(wb, ws, "Sheet1");
var wb2 = X.read(X.write(wb, {bookType:fmt, type:TYPE}), {type:TYPE});
var Name: X.DefinedName = (null as any);
((wb2.Workbook||{}).Names || []).forEach(function(dn) { if(dn.Name == "_xlnm._FilterDatabase" && dn.Sheet == 0) Name = dn; });
assert.assert(!!Name, "Could not find _xlnm._FilterDatabases name WR");
assert.equal(Name.Ref, "Sheet1!$A$1:$C$2");
X.utils.sheet_add_aoa(wb2.Sheets["Sheet1"], [[4,5,6]], { origin: -1 });
(wb2.Sheets["Sheet1"]["!autofilter"] as any).ref = wb2.Sheets["Sheet1"]["!ref"];
var wb3 = X.read(X.write(wb2, {bookType:fmt, type:TYPE}), {type:TYPE});
Name = (null as any);
((wb3.Workbook||{}).Names || []).forEach(function(dn) { if(dn.Name == "_xlnm._FilterDatabase" && dn.Sheet == 0) Name = dn; });
assert.assert(!!Name, "Could not find _xlnm._FilterDatabases name WRWR");
assert.equal(Name.Ref, "Sheet1!$A$1:$C$3");
assert.equal(((wb2.Workbook as any).Names as any).length, ((wb3.Workbook as any).Names as any).length);
}); });
await t.step('should handle non-string values for "s" cells', async function(t) {([
"xlsx", "xlsb", "xls", "biff5", "biff2", "xlml", "numbers", "ods", "fods", "wk3", "csv", "txt", "sylk", "html", "dif", "dbf", "wk1", "rtf", "prn"
] as Array<X.BookType>).forEach(function(fmt) {
if(fmt == "numbers" && !can_write_numbers) return;
var ws = X.utils.aoa_to_sheet([
["String", "123"],
["Number", 123],
["Boolean", true],
["Date", new Date()],
], { cellDates: true });
ws["B2"].t = ws["B3"].t = ws["B4"].t = "s"
var wb = X.utils.book_new();
X.utils.book_append_sheet(wb, ws, "Sheet1");
X.write(wb, {type: TYPE, bookType: fmt, bookSST: false, numbers:XLSX_ZAHL});
X.write(wb, {type: TYPE, bookType: fmt, bookSST: true, numbers:XLSX_ZAHL});
}); });
});

function seq(end: number, start?: number): Array<number> {
var s = start || 0;
var o = new Array(end - s);
for(var i = 0; i != o.length; ++i) o[i] = s + i;
return o;
}

var dnthresh = /*#__PURE__*/Date.UTC(1899, 11, 30, 0, 0, 0); // -2209161600000
var dnthresh1 = /*#__PURE__*/Date.UTC(1899, 11, 31, 0, 0, 0); // -2209075200000
var dnthresh2 = /*#__PURE__*/Date.UTC(1904, 0, 1, 0, 0, 0); // -2082844800000
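/* datenum converts a JS Date (treated as UTC) to a spreadsheet serial date.
   The 1900 epoch is anchored at 1899-12-30 so that serial 1 maps to 1900-01-01;
   serials below 60 are shifted down by 1 to compensate for the nonexistent
   1900-02-29 that the 1900 date system inserts at serial 60 (the Lotus 1-2-3
   compatibility quirk), and the 1904 system is handled by subtracting the
   1462-day gap between the two epochs. For example,
   datenum(new Date("2014-02-19T00:00:00Z")) === 41689. */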
function datenum(v: Date, date1904?: boolean): number {
var epoch = /*#__PURE__*/v.getTime();
var res = (epoch - dnthresh) / (24 * 60 * 60 * 1000);
if(date1904) return res - 1462;
return res < 60 ? res - 1 : res;
}
/* Blame https://bugs.chromium.org/p/v8/issues/detail?id=7863 for the regexide */
var pdre1 = /^(\d+):(\d+)(:\d+)?(\.\d+)?$/; // HH:MM[:SS[.UUU]]
var pdre2 = /^(\d+)-(\d+)-(\d+)$/; // YYYY-mm-dd
var pdre3 = /^(\d+)-(\d+)-(\d+)[T ](\d+):(\d+)(:\d+)?(\.\d+)?$/; // YYYY-mm-dd(T or space)HH:MM[:SS[.UUU]] sans "Z"
/* parses a date string as a UTC date */
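/* parseDate matches the patterns above and builds the timestamp with Date.UTC,
   so the result does not depend on the host timezone the way `new Date(string)`
   can for non-ISO inputs; bare times (pdre1) are anchored to the day-zero date
   of the requested date system. For example, parseDate("2014-02-19") is the
   same instant as new Date(Date.UTC(2014, 1, 19)). */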
function parseDate(str: string|Date, date1904?: boolean): Date {
if(str instanceof Date) return str;
var m = str.match(pdre1);
if(m) return new Date((date1904 ? dnthresh2 : dnthresh1) + ((parseInt(m[1], 10)*60 + parseInt(m[2], 10))*60 + (m[3] ? parseInt(m[3].slice(1), 10) : 0))*1000 + (m[4] ? parseInt((m[4]+"000").slice(1,4), 10) : 0));
m = str.match(pdre2);
if(m) return new Date(Date.UTC(+m[1], +m[2]-1, +m[3], 0, 0, 0, 0));
m = str.match(pdre3);
if(m) return new Date(Date.UTC(+m[1], +m[2]-1, +m[3], +m[4], +m[5], ((m[6] && parseInt(m[6].slice(1), 10))|| 0), ((m[7] && parseInt(m[7].slice(1), 10))||0)));
var d = new Date(str);
return d;
}

var fixdate = browser ? parseDate("2014-02-19T14:30:00.000Z") : new Date("2014-02-19T14:30Z");

Deno.test('roundtrip features', async function(t) {
await t.step('should preserve core properties', async function(t) { var cp = [
['xls', paths.cpxls],
['xlml', paths.cpxml],
['xlsx', paths.cpxlsx],
['xlsb', paths.cpxlsb]
]; for(var cpi = 0; cpi < cp.length; ++cpi) { var w = cp[cpi];
await t.step(w[0], async function(t) {
var wb1 = X.read(fs.readFileSync(w[1]), {type:TYPE});
coreprop(wb1.Props as any);
var wb2 = X.read(X.write(wb1, {bookType:w[0], type:TYPE}), {type:TYPE});
coreprop(wb2.Props as any);
});
} });

await t.step('should preserve custom properties', async function(t) { var cp = [
['xls', paths.cpxls],
['xlml', paths.cpxml],
['xlsx', paths.cpxlsx],
['xlsb', paths.cpxlsb]
]; for(var cpi = 0; cpi < cp.length; ++cpi) { var w = cp[cpi];
await t.step(w[0], async function(t) {
var wb1 = X.read(fs.readFileSync(w[1]), {type:TYPE});
custprop(wb1.Custprops);
var wb2 = X.read(X.write(wb1, {bookType:w[0], type:TYPE}), {type:TYPE});
custprop(wb2.Custprops);
});
} });

await t.step('should preserve merge cells', async function(t) {
var mcf = ["xlsx", "xlsb", "xlml", "ods", "biff8", "numbers"] as Array<X.BookType>; for(let mci = 0; mci < mcf.length; ++mci) { let f = mcf[mci]; await t.step(f, async function(t) {
if(f == "numbers" && !can_write_numbers) return;
var wb1 = X.read(fs.readFileSync(paths.mcxlsx), {type:TYPE});
var wb2 = X.read(X.write(wb1,{bookType:f,type:'binary',numbers:XLSX_ZAHL}),{type:'binary'});
var m1 = wb1.Sheets["Merge"]?.['!merges']?.map(X.utils.encode_range);
var m2 = wb2.Sheets["Merge"]?.['!merges']?.map(X.utils.encode_range);
assert.equal(m1?.length, m2?.length);
if(m1 && m2) for(var i = 0; i < m1?.length; ++i) assert.assert(m1?.indexOf(m2?.[i]) > -1);
}); }
});

await t.step('should preserve dates', async function(t) {
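/* The 4-bit counter below enumerates every combination of the cellDates flag
   across the chain read -> write -> read: the high bit picks the source file,
   and the remaining three bits pick whether each stage requests date ('d') or
   numeric ('n') cells. The assertions check that the formatted text survives
   the roundtrip and that the cell types match what each stage requested. */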
for(var n = 0; n < 16; ++n) {
var d = (n & 1) ? 'd' : 'n', dk = d === 'd';
var c = (n & 2) ? 'd' : 'n', dj = c === 'd';
var b = (n & 4) ? 'd' : 'n', di = b === 'd';
var a = (n & 8) ? 'd' : 'n', dh = a === 'd';
var f: string, sheet: string, addr: string;
if(dh) { f = paths.dtxlsx; sheet = 'Sheet1'; addr = 'B5'; }
else { f = paths.nfxlsx; sheet = '2011'; addr = 'J36'; }
await t.step('[' + a + '] -> (' + b + ') -> [' + c + '] -> (' + d + ')', async function(t) {
var wb1 = X.read(fs.readFileSync(f), {type:TYPE, cellNF: true, cellDates: di, WTF: opts.WTF});
var _f = X.write(wb1, {type:'binary', cellDates:dj, WTF:opts.WTF});
var wb2 = X.read(_f, {type:'binary', cellDates: dk, WTF: opts.WTF});
var m = [wb1,wb2].map(function(x: X.WorkBook) { return get_cell(x.Sheets[sheet], addr); });
assert.equal(m[0].w, m[1].w);

assert.equal(m[0].t, b);
assert.equal(m[1].t, d);

if(m[0].t === 'n' && m[1].t === 'n') assert.equal(m[0].v, m[1].v);
else if(m[0].t === 'd' && m[1].t === 'd') assert.equal(m[0].v.toString(), m[1].v.toString());
else if(m[1].t === 'n') assert.assert(Math.abs(datenum(browser ? parseDate(m[0].v) : new Date(m[0].v)) - m[1].v) < 0.01);
});
}
});

await t.step('should preserve formulae', async function(t) { var ff = [
['xlml', paths.fstxml],
['xlsx', paths.fstxlsx],
['ods', paths.fstods]
]; for(let ii = 0; ii < ff.length; ++ii) { let w = ff[ii]; await t.step(w[0], async function(t) {
var wb1 = X.read(fs.readFileSync(w[1]), {type:TYPE, cellFormula:true, WTF:true});
var wb2 = X.read(X.write(wb1, {bookType:w[0], type:TYPE}), {cellFormula:true, type:TYPE, WTF:true});
wb1.SheetNames.forEach(function(n) {
assert.equal(
X.utils.sheet_to_formulae(wb1.Sheets[n]).sort().join("\n"),
X.utils.sheet_to_formulae(wb2.Sheets[n]).sort().join("\n")
);
});
}); } });

await t.step('should preserve dynamic array formulae', async function(t) { var m19 = [
['xlsx', paths.m19xlsx]
]; for(var h1 = 0; h1 < m19.length; ++h1) { var w = m19[h1]; await t.step(w[0], async function(t) {
var wb1 = X.read(fs.readFileSync(w[1]), {xlfn: true, type:TYPE, cellFormula:true, WTF:true});