Improve Python library, add LLGLTF asset parser, add tests
This commit is contained in:
37
lib/classes/LLGLTFMaterial.spec.ts
Normal file
37
lib/classes/LLGLTFMaterial.spec.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { LLGLTFMaterial } from './LLGLTFMaterial';
import * as assert from 'assert';

describe('LLGLTFMaterial', () =>
{
    describe('parse', () =>
    {
        it ('should parse a valid GLTF material asset', () =>
        {
            // Captured binary-LLSD GLTF material asset: an LLSD map with
            // 'type', 'version' and a 'data' entry holding the GLTF JSON.
            const buf = Buffer.from('PD8gTExTRC9CaW5hcnkgPz4KewAAAANrAAAABGRhdGFzAAABznsiYXNzZXQiOnsidmVyc2lvbiI6IjIuMCJ9LCJpbWFnZXMiOlt7InVyaSI6IjJjN2U3MzMyLTM3MTctNWY0ZS04ZjIyLTZlZTlkYTUyNzVmYiJ9LHsidXJpIjoiMTA3OGY1ZWMtMWM1Ni1lNzNmLThmOGYtYmUzNmM0MGU1MTIxIn0seyJ1cmkiOiIyMzBkMmQyZC1iMDkyLTliZjUtYmE3ZS1iMzE5NTY2MzIyYTYifSx7InVyaSI6IjIzMGQyZDJkLWIwOTItOWJmNS1iYTdlLWIzMTk1NjYzMjJhNiJ9XSwibWF0ZXJpYWxzIjpbeyJub3JtYWxUZXh0dXJlIjp7ImluZGV4IjoxfSwib2NjbHVzaW9uVGV4dHVyZSI6eyJpbmRleCI6M30sInBick1ldGFsbGljUm91Z2huZXNzIjp7ImJhc2VDb2xvclRleHR1cmUiOnsiaW5kZXgiOjB9LCJtZXRhbGxpY1JvdWdobmVzc1RleHR1cmUiOnsiaW5kZXgiOjJ9fX1dLCJ0ZXh0dXJlcyI6W3sic291cmNlIjowfSx7InNvdXJjZSI6MX0seyJzb3VyY2UiOjJ9LHsic291cmNlIjozfV19CmsAAAAEdHlwZXMAAAAIR0xURiAyLjBrAAAAB3ZlcnNpb25zAAAAAzEuMX0A', 'base64');
            const mat = new LLGLTFMaterial(buf);

            // Envelope metadata from the outer LLSD map.
            assert.equal(mat.version, '1.1');
            assert.equal(mat.type, 'GLTF 2.0');
            assert.ok(mat.data);
            // Inner GLTF document checks.
            assert.equal(mat.data.asset.version, '2.0');
            assert.equal(mat.data.images.length, 4);
            assert.equal(mat.data.images[0].uri, '2c7e7332-3717-5f4e-8f22-6ee9da5275fb');
            assert.equal(mat.data.images[1].uri, '1078f5ec-1c56-e73f-8f8f-be36c40e5121');
            assert.equal(mat.data.images[2].uri, '230d2d2d-b092-9bf5-ba7e-b319566322a6');
            assert.equal(mat.data.images[3].uri, '230d2d2d-b092-9bf5-ba7e-b319566322a6');
            assert.equal(mat.data.materials.length, 1);

            // The single material references each of the four textures.
            const mat0 = mat.data.materials[0];
            assert.equal(mat0.normalTexture.index, 1);
            assert.equal(mat0.occlusionTexture.index, 3);
            assert.equal(mat0.pbrMetallicRoughness.baseColorTexture.index, 0);
            assert.equal(mat0.pbrMetallicRoughness.metallicRoughnessTexture.index, 2);

            assert.equal(mat.data.textures.length, 4);
            assert.equal(mat.data.textures[0].source, 0);
            assert.equal(mat.data.textures[1].source, 1);
            assert.equal(mat.data.textures[2].source, 2);
            assert.equal(mat.data.textures[3].source, 3);
        });
    });
});
|
||||
41
lib/classes/LLGLTFMaterial.ts
Normal file
41
lib/classes/LLGLTFMaterial.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import * as LLSD from '@caspertech/llsd';
|
||||
import { LLGLTFMaterialData } from './LLGLTFMaterialData';
|
||||
|
||||
export class LLGLTFMaterial
|
||||
{
|
||||
public type?: string;
|
||||
public version?: string;
|
||||
public data?: LLGLTFMaterialData;
|
||||
|
||||
public constructor(data?: Buffer)
|
||||
{
|
||||
if (data !== undefined)
|
||||
{
|
||||
const header = data.slice(0, 18).toString('utf-8');
|
||||
if (header.length !== 18 || header !== '<? LLSD/Binary ?>\n')
|
||||
{
|
||||
throw new Error('Failed to parse LLGLTFMaterial');
|
||||
}
|
||||
|
||||
const body = new LLSD.Binary(Array.from(data.slice(18)), 'BINARY');
|
||||
const llsd = LLSD.LLSD.parseBinary(body);
|
||||
if (!llsd.result)
|
||||
{
|
||||
throw new Error('Failed to decode LLGLTFMaterial');
|
||||
}
|
||||
if (llsd.result.type)
|
||||
{
|
||||
this.type = String(llsd.result.type);
|
||||
}
|
||||
if (llsd.result.version)
|
||||
{
|
||||
this.version = String(llsd.result.version);
|
||||
}
|
||||
if (llsd.result.data)
|
||||
{
|
||||
const assetData = String(llsd.result.data);
|
||||
this.data = JSON.parse(assetData);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
28
lib/classes/LLGLTFMaterialData.ts
Normal file
28
lib/classes/LLGLTFMaterialData.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
export interface LLGLTFMaterialData
|
||||
{
|
||||
asset: {
|
||||
version: string;
|
||||
};
|
||||
images: {
|
||||
uri: string;
|
||||
}[];
|
||||
materials: {
|
||||
normalTexture: {
|
||||
index: number
|
||||
},
|
||||
occlusionTexture: {
|
||||
index: number;
|
||||
},
|
||||
pbrMetallicRoughness: {
|
||||
baseColorTexture: {
|
||||
index: number
|
||||
},
|
||||
metallicRoughnessTexture: {
|
||||
index: number
|
||||
}
|
||||
}
|
||||
}[];
|
||||
textures: {
|
||||
source: number
|
||||
}[];
|
||||
}
|
||||
@@ -30,10 +30,13 @@ export class ImprovedInstantMessageMessage implements MessageBase
|
||||
Message: Buffer;
|
||||
BinaryBucket: Buffer;
|
||||
};
|
||||
EstateBlock: {
|
||||
EstateID: number;
|
||||
};
|
||||
|
||||
// Returns the serialized size of this message in bytes.
// Each variable-length field contributes its data plus a 1- or 2-byte
// length prefix; the trailing 107 covers the message's fixed-size fields
// (the pasted diff showed both 103 and 107 — 107 is the updated value,
// accounting for the 4-byte EstateID block appended on serialization).
getSize(): number
{
    return (this.MessageBlock['FromAgentName'].length + 1 + this.MessageBlock['Message'].length + 2 + this.MessageBlock['BinaryBucket'].length + 2) + 107;
}
|
||||
|
||||
// @ts-ignore
|
||||
@@ -70,6 +73,8 @@ export class ImprovedInstantMessageMessage implements MessageBase
|
||||
pos += 2;
|
||||
this.MessageBlock['BinaryBucket'].copy(buf, pos);
|
||||
pos += this.MessageBlock['BinaryBucket'].length;
|
||||
buf.writeUInt32LE(this.EstateBlock['EstateID'], pos);
|
||||
pos += 4;
|
||||
return pos - startPos;
|
||||
}
|
||||
|
||||
@@ -144,7 +149,14 @@ export class ImprovedInstantMessageMessage implements MessageBase
|
||||
newObjMessageBlock['BinaryBucket'] = buf.slice(pos, pos + varLength);
|
||||
pos += varLength;
|
||||
this.MessageBlock = newObjMessageBlock;
|
||||
const newObjEstateBlock: {
|
||||
EstateID: number
|
||||
} = {
|
||||
EstateID: 0
|
||||
};
|
||||
newObjEstateBlock['EstateID'] = buf.readUInt32LE(pos);
|
||||
pos += 4;
|
||||
this.EstateBlock = newObjEstateBlock;
|
||||
return pos - startPos;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -47,6 +47,11 @@ export class PythonList extends PythonObject
|
||||
throw new Error('Expected ] end bracket in list')
|
||||
}
|
||||
|
||||
/**
 * Returns the element stored at the given position in the list.
 * @param index zero-based position
 * @returns the element, or undefined when the index is out of range
 */
public get(index: number): PythonType | undefined
{
    return this.data[index];
}
|
||||
|
||||
get length(): number
|
||||
{
|
||||
return this.data.length;
|
||||
|
||||
148
lib/classes/python/PythonParser.spec.ts
Normal file
148
lib/classes/python/PythonParser.spec.ts
Normal file
@@ -0,0 +1,148 @@
|
||||
import { PythonParser } from './PythonParser';

import * as assert from 'assert';
import { PythonDict } from './PythonDict';
import { PythonList } from './PythonList';
import { PythonTuple } from './PythonTuple';

describe('PythonParser', () =>
{
    describe('parse', () =>
    {
        it('can parse a complex python dictionary notation', () =>
        {
            // One document exercising every token type the parser knows:
            // nested dicts, lists, tuples, booleans, bytes literals,
            // numeric formats, and the python string-literal styles
            // (single/double/triple-quoted and raw variants).
            // NOTE: continuation lines of multi-line strings are kept
            // flush-left — indenting them would change the string values.
            const notationDoc = `{
"nested_dict": {
"key1": "value1",
"key2": {
"inner_key": "inner_value"
}
},
"list": [1, 2, 3, [4, 5]],
"boolean": True,
"tuple": (1, 2, ("nested_tuple", 3)),
"bytes": b'hello',
"float": 3.14,
'integer': 42,
"hex_number": 0x1A,
"octal_number": 0o52,
"string_single": 'single-quoted\\' string',
"string_double": "double-quoted \\" string",
"string_triple_single": '''triple-quoted\'
single-quoted string''',
"string_triple_double": """triple-quoted\"
double-quoted string""",
"raw_string_single": r'raw single-quoted\\ string',
"raw_string_double": r"raw double-quoted\\ string",
"raw_string_triple_single": r'''raw triple\\''-quoted
single-quoted string''',
"raw_string_triple_double": r"""raw triple\\""-quoted
double-quoted string"""
}`;
            const parsed = PythonParser.parse(notationDoc);
            if (!(parsed instanceof PythonDict))
            {
                assert(false);
                return;
            }

            // Nested dictionary access, two levels deep.
            const nested = parsed.get('nested_dict');
            assert.ok(nested);
            if (!(nested instanceof PythonDict))
            {
                assert(false);
            }
            else
            {
                assert.equal(nested.get('key1'), 'value1');
                const key2 = nested.get('key2');
                if (!(key2 instanceof PythonDict))
                {
                    assert(false);
                }
                else
                {
                    assert.equal(key2.get('inner_key'), 'inner_value');
                }
            }

            // Lists, including a nested list element.
            const list = parsed.get('list');
            if (!(list instanceof PythonList))
            {
                assert(false);
            }
            else
            {
                assert.equal(list.length, 4);
                assert.equal(list.get(0), 1);
                assert.equal(list.get(1), 2);
                assert.equal(list.get(2), 3);
                const nestedList = list.get(3);
                if (!(nestedList instanceof PythonList))
                {
                    assert(false);
                }
                else
                {
                    assert.equal(nestedList.get(0), 4);
                    assert.equal(nestedList.get(1), 5);
                }
                // Out-of-range reads return undefined rather than throwing.
                assert.equal(list.get(4), undefined);
            }

            assert.equal(parsed.get('boolean'), true);
            // Tuples, including a nested tuple element.
            const tuple = parsed.get('tuple');
            if (!(tuple instanceof PythonTuple))
            {
                assert(false);
            }
            else
            {
                assert.equal(tuple.get(0), 1);
                assert.equal(tuple.get(1), 2);
                const nestedTuple = tuple.get(2);
                if (!(nestedTuple instanceof PythonTuple))
                {
                    assert(false);
                }
                else
                {
                    assert.equal(nestedTuple.get(0), 'nested_tuple');
                    assert.equal(nestedTuple.get(1), 3);
                }
                assert.equal(tuple.get(3), undefined);
            }
            // b'...' literals decode to Buffers.
            const buf = parsed.get('bytes');
            if (buf instanceof Buffer)
            {
                assert.equal(Buffer.from('hello', 'binary').compare(buf), 0);
            }
            else
            {
                assert(false);
            }
            assert.equal(parsed.get('float'), 3.14);
            assert.equal(parsed.get('integer'), 42);
            // Hex and octal literals decode to plain numbers.
            assert.equal(parsed.get('hex_number'), 26);
            assert.equal(parsed.get('octal_number'), 42);
            assert.equal(parsed.get('string_single'), 'single-quoted\\\' string');
            assert.equal(parsed.get('string_double'), 'double-quoted \\" string');
            assert.equal(parsed.get('string_triple_single'), 'triple-quoted\'\nsingle-quoted string');
            assert.equal(parsed.get('string_triple_double'), 'triple-quoted\"\ndouble-quoted string');

            /*
             raw_string_single": r'raw single-quoted\ string',
            "raw_string_double": r"raw double-quoted\ string",
            "raw_string_triple_single": r'''raw triple\''-quoted
single-quoted string''',
            "raw_string_triple_double": r"""raw triple\''-quoted
double-quoted string"""
            */
            // Raw strings: backslashes pass through without interpretation.
            assert.equal(parsed.get('raw_string_single'), 'raw single-quoted\\ string');
            assert.equal(parsed.get('raw_string_double'), 'raw double-quoted\\ string');
            assert.equal(parsed.get('raw_string_triple_single'), 'raw triple\\\'\'-quoted\nsingle-quoted string');
            assert.equal(parsed.get('raw_string_triple_double'), 'raw triple\\""-quoted\ndouble-quoted string');
        });
    });
});
|
||||
|
||||
@@ -23,27 +23,30 @@ export class PythonParser
|
||||
{ regex: /^[,]/, type: PythonTokenType.COMMA },
|
||||
{ regex: /^None\b/, type: PythonTokenType.NONE },
|
||||
{ regex: /^(True|False)\b/, type: PythonTokenType.BOOLEAN },
|
||||
{ regex: /^((?:-?[0-9]+\.[0-9]*)|(?:-?[0.9]*\.[0-9]+))/, type: PythonTokenType.FLOAT },
|
||||
{ regex: /^\d+\b/, type: PythonTokenType.INTEGER },
|
||||
{ regex: /^0x([0-9a-fA-F]+\b)/, type: PythonTokenType.HEX },
|
||||
{ regex: /^0o([0-7]+)/, type: PythonTokenType.OCTAL },
|
||||
{ regex: /^b'(\\[0-7]{3}|\\x[0-9A-Fa-f]{2}|\\['"abfnrtv\\]|[^'\\])*'/, type: PythonTokenType.BYTES },
|
||||
{ regex: /^b"(\\[0-7]{3}|\\x[0-9A-Fa-f]{2}|\\["'abfnrtv\\]|[^"\\])*"/, type: PythonTokenType.BYTES },
|
||||
{ regex: /^-?\d+\.\d+\b/, type: PythonTokenType.FLOAT },
|
||||
{ regex: /^-?\d+\.?\d*[jJ]\b/, type: PythonTokenType.COMPLEX },
|
||||
{ regex: /^\(/, type: PythonTokenType.TUPLE_START },
|
||||
{ regex: /^\)/, type: PythonTokenType.TUPLE_END },
|
||||
{ regex: /^\[/, type: PythonTokenType.LIST_START },
|
||||
{ regex: /^\]/, type: PythonTokenType.LIST_END },
|
||||
{ regex: /^'((?:\\.|[^'\\])*)'/, type: PythonTokenType.STRING }, // Single-quoted strings
|
||||
{ regex: /^"((?:\\.|[^"\\])*)"/, type: PythonTokenType.STRING }, // Double-quoted strings
|
||||
{ regex: /^'''((?:\\.|[^'\\]|'{1,2}(?![']))*)'''/, type: PythonTokenType.STRING }, // Triple-quoted single strings
|
||||
{ regex: /^"""((?:\\.|[^"\\]|"{1,2}(?!["]))*)"""/, type: PythonTokenType.STRING }, // Triple-quoted double strings
|
||||
{ regex: /^r'((?:\\.|[^'\\])*)'/, type: PythonTokenType.STRING }, // Raw single-quoted strings
|
||||
{ regex: /^r"((?:\\.|[^"\\])*)"/, type: PythonTokenType.STRING }, // Raw double-quoted strings
|
||||
{ regex: /^\\u[\dA-Fa-f]{4}/, type: PythonTokenType.STRING }, // Unicode escape sequences
|
||||
{ regex: /^\\U[\dA-Fa-f]{8}/, type: PythonTokenType.STRING }, // Unicode escape sequences
|
||||
{ regex: /^"""((?:[^"]*|\n|\\"|")*?)"""/, type: PythonTokenType.STRING }, // triple double quoted string
|
||||
{ regex: /^'''((?:[^']*|\n|\\'|')*?)'''/, type: PythonTokenType.STRING }, // triple single quoted string
|
||||
{ regex: /^'([^'\\]*(?:\\.[^'\\\n]*)*)'/, type: PythonTokenType.STRING }, // single quoted string
|
||||
{ regex: /^"([^"\\]*(?:\\.[^"\\\n]*)*)"/, type: PythonTokenType.STRING }, // double quoted string
|
||||
|
||||
{ regex: /^b"""((?:[^"]*|\n|\\"|")*?)"""/, type: PythonTokenType.BINARY_STRING }, // triple double quoted string
|
||||
{ regex: /^b'''((?:[^']*|\n|\\'|')*?)'''/, type: PythonTokenType.BINARY_STRING }, // triple single quoted string
|
||||
{ regex: /^b'([^'\\]*(?:\\.[^'\\\n]*)*)'/, type: PythonTokenType.BINARY_STRING }, // single quoted string
|
||||
{ regex: /^b"([^"\\]*(?:\\.[^"\\\n]*)*)"/, type: PythonTokenType.BINARY_STRING }, // double quoted string
|
||||
|
||||
{ regex: /^r"""((?:[^"]*|\n|")*?)"""/, type: PythonTokenType.RAW_STRING }, // triple double quoted string
|
||||
{ regex: /^r'''((?:[^']*|\n|')*?)'''/, type: PythonTokenType.RAW_STRING }, // triple single quoted string
|
||||
{ regex: /^r'([^'\n]*?)'/, type: PythonTokenType.RAW_STRING }, // single quoted string
|
||||
{ regex: /^r"([^"\n]*?)"/, type: PythonTokenType.RAW_STRING }, // double quoted string
|
||||
|
||||
{ regex: /^-?\d+\.?\d*[eE][-+]?\d+/, type: PythonTokenType.FLOAT }, // Scientific notation
|
||||
{ regex: /^-?\.\d+\b/, type: PythonTokenType.FLOAT }, // Leading dot float, e.g., .123
|
||||
{ regex: /^\d+(_\d+)*\b/, type: PythonTokenType.INTEGER }, // Integer with underscores, e.g., 1_000_000
|
||||
{ regex: /^[^\s:{},"'\[\]\(\)]+/, type: PythonTokenType.UNKNOWN } // Catch all for other non-structured sequences
|
||||
];
|
||||
@@ -89,50 +92,6 @@ export class PythonParser
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Expands python byte-string escape sequences (\xHH hexadecimal, \OOO
 * octal, and the single-character escapes \a \b \f \n \r \t \v \\ \' \")
 * into raw byte values; all other characters are copied through as their
 * UTF-16 code units.
 * @param byteString the unquoted contents of a b'...' literal
 * @returns a Buffer holding the decoded bytes
 */
private static interpretEscapes(byteString: string): Buffer
{
    // Code points for the recognised single-character escapes.
    const escapeCodes: { [key: string]: number } = {
        'a': 7,    // Alert (bell)
        'b': 8,    // Backspace
        'f': 12,   // Formfeed
        'n': 10,   // New line
        'r': 13,   // Carriage return
        't': 9,    // Horizontal tab
        'v': 11,   // Vertical tab
        '\\': 92,  // Backslash
        '\'': 39,  // Single quote
        '"': 34,   // Double quote
    };
    // Alternatives, in priority order: hex escape, octal escape,
    // single-character escape, then a run of literal characters.
    const scanner = /\\x([0-9A-Fa-f]{2})|\\([0-7]{1,3})|\\(['"abfnrtv\\])|([^\\]+)/g;
    const bytes: number[] = [];
    for (let hit = scanner.exec(byteString); hit !== null; hit = scanner.exec(byteString))
    {
        if (hit[1]) // \xHH — two hex digits
        {
            bytes.push(parseInt(hit[1], 16));
        }
        else if (hit[2]) // \OOO — one to three octal digits
        {
            bytes.push(parseInt(hit[2], 8));
        }
        else if (hit[3]) // single-character escape
        {
            bytes.push(escapeCodes[hit[3]]);
        }
        else if (hit[4]) // literal run with no backslashes
        {
            for (let i = 0; i < hit[4].length; ++i)
            {
                bytes.push(hit[4].charCodeAt(i));
            }
        }
    }
    return Buffer.from(bytes);
}
|
||||
|
||||
public static parseValueToken(container: PythonTokenContainer): PythonType
|
||||
{
|
||||
const t = container.tokens[container.index++];
|
||||
@@ -146,6 +105,14 @@ export class PythonParser
|
||||
{
|
||||
return t.value;
|
||||
}
|
||||
case PythonTokenType.BINARY_STRING:
|
||||
{
|
||||
return Buffer.from(t.value, 'binary');
|
||||
}
|
||||
case PythonTokenType.RAW_STRING:
|
||||
{
|
||||
return t.value;
|
||||
}
|
||||
case PythonTokenType.BOOLEAN:
|
||||
{
|
||||
return t.value === 'True';
|
||||
@@ -158,10 +125,6 @@ export class PythonParser
|
||||
{
|
||||
return PythonTuple.parse(container);
|
||||
}
|
||||
case PythonTokenType.BYTES:
|
||||
{
|
||||
return this.interpretEscapes(t.value);
|
||||
}
|
||||
case PythonTokenType.NONE:
|
||||
{
|
||||
return null;
|
||||
@@ -182,10 +145,6 @@ export class PythonParser
|
||||
{
|
||||
return parseFloat(t.value);
|
||||
}
|
||||
case PythonTokenType.COMPLEX:
|
||||
{
|
||||
throw new Error('Complex numbers are currently unhandled');
|
||||
}
|
||||
default:
|
||||
throw new Error('Unexpected token: ' + PythonTokenType[t.type]);
|
||||
}
|
||||
|
||||
@@ -8,13 +8,13 @@ export enum PythonTokenType
|
||||
BOOLEAN,
|
||||
INTEGER,
|
||||
FLOAT,
|
||||
COMPLEX,
|
||||
STRING,
|
||||
BINARY_STRING,
|
||||
RAW_STRING,
|
||||
LIST_START,
|
||||
LIST_END,
|
||||
TUPLE_START,
|
||||
TUPLE_END,
|
||||
BYTES,
|
||||
HEX,
|
||||
OCTAL,
|
||||
UNKNOWN // Catch all for other sequences
|
||||
|
||||
@@ -10,7 +10,7 @@ export class PythonTuple extends PythonObject
|
||||
|
||||
public static parse(container: PythonTokenContainer): PythonTuple
|
||||
{
|
||||
let expectingComma = true;
|
||||
let expectingComma = false;
|
||||
const tuple = new PythonTuple();
|
||||
do
|
||||
{
|
||||
@@ -48,6 +48,11 @@ export class PythonTuple extends PythonObject
|
||||
throw new Error('Expected ) end bracket in tuple')
|
||||
}
|
||||
|
||||
/**
 * Returns the element stored at the given position in the tuple.
 * @param index zero-based position
 * @returns the element, or undefined when the index is out of range
 */
public get(index: number): PythonType | undefined
{
    return this.data[index];
}
|
||||
|
||||
get length(): number
|
||||
{
|
||||
return this.data.length;
|
||||
|
||||
@@ -105,6 +105,10 @@ import { LLGestureWaitStep } from './classes/LLGestureWaitStep';
|
||||
import { LLGestureChatStep } from './classes/LLGestureChatStep';
|
||||
import { LLGestureStepType } from './enums/LLGestureStepType';
|
||||
import { LLLindenText } from './classes/LLLindenText';
|
||||
import { LLGLTFMaterial } from './classes/LLGLTFMaterial';
|
||||
import { ExtendedMeshData } from './classes/public/ExtendedMeshData';
|
||||
import { ReflectionProbeData } from './classes/public/ReflectionProbeData';
|
||||
import { RenderMaterialData } from './classes/public/RenderMaterialData';
|
||||
|
||||
export {
|
||||
Bot,
|
||||
@@ -119,6 +123,7 @@ export {
|
||||
TextureEntry,
|
||||
LLWearable,
|
||||
LLLindenText,
|
||||
LLGLTFMaterial,
|
||||
LLGesture,
|
||||
LLGestureAnimationStep,
|
||||
LLGestureSoundStep,
|
||||
@@ -218,6 +223,9 @@ export {
|
||||
InventoryItem,
|
||||
TarReader,
|
||||
TarWriter,
|
||||
ExtendedMeshData,
|
||||
ReflectionProbeData,
|
||||
RenderMaterialData,
|
||||
|
||||
// Public Interfaces
|
||||
GlobalPosition,
|
||||
|
||||
@@ -1,167 +1,167 @@
|
||||
import 'mocha';
import * as fs from 'fs';
import * as path from 'path';
import { Packet } from '../classes/Packet';
import { DecodeFlags } from '../enums/DecodeFlags';
import { PacketFlags } from '../enums/PacketFlags';

// Returns true when both arrays have the same length and strictly-equal
// elements at every position (order-sensitive).
function compareArrays(arr1: any[], arr2: any[]): boolean
{
    if (arr1.length === arr2.length
        && arr1.every(function(u, i): boolean
        {
            return u === arr2[i];
        })
    )
    {
        return true;
    }
    else
    {
        return false;
    }
}

describe('Packets', () =>
{
    // Round-trips every captured .packet fixture: decode it, compare the
    // result against the expected .json, re-encode, then compare bytes.
    const p = path.resolve(__dirname + '/../../../testing/packets');
    const files = fs.readdirSync(p);
    for (const file of files)
    {
        if (file.substr(file.length - 7) === '.packet')
        {
            const fullPath = p + '/' + file;
            const stats = fs.statSync(fullPath);
            if (!stats.isDirectory())
            {
                describe(file, () =>
                {
                    // Shared state: later tests depend on the decode step
                    // above them having run first (mocha runs in order).
                    let pos = 0;
                    let data: Buffer = Buffer.allocUnsafe(0);
                    const packet: Packet = new Packet();
                    const acksReceived: number[] = [];
                    const acksSent: number[] = [];

                    it('should decode correctly', (done) =>
                    {
                        try
                        {
                            data = fs.readFileSync(fullPath);
                            pos = packet.readFromBuffer(data, 0, (number) =>
                            {
                                acksReceived.push(number);
                            }, (number) =>
                            {
                                acksSent.push(number);
                            });
                            done();
                        }
                        catch (err)
                        {
                            done(err);
                        }
                    });

                    it('should have read the entire packet', (done) =>
                    {
                        if (pos < data.length)
                        {
                            done('Finished reading but we\'re not at the end of the packet (' + pos + ' < ' + data.length + ', seq ' + packet.sequenceNumber + ')');
                        }
                        else
                        {
                            done();
                        }
                    });

                    // Expected decode results live next to the fixture.
                    const jsonFN = fullPath.replace('.packet', '.json');
                    const jsFile = fs.readFileSync(jsonFN);
                    const json = JSON.parse(jsFile.toString('utf8'));

                    it('should have sent the correct number of packet ACKs', (done) =>
                    {
                        if (!compareArrays(json.sentAcks, acksSent))
                        {
                            done('Sent acks does not match expected');
                        }
                        else
                        {
                            done();
                        }
                    });
                    it('should have received the correct number of packet ACKs', (done) =>
                    {
                        if (!compareArrays(json.receivedAcks, acksReceived))
                        {
                            done('Received acks does not match expected');
                        }
                        else
                        {
                            done();
                        }
                    });
                    it('should match our expected decoded data', (done) =>
                    {
                        // Compare via JSON serialization for a deep check.
                        let pckt = json['packet'];
                        pckt = JSON.stringify(pckt);
                        if (JSON.stringify(packet) !== pckt)
                        {
                            done('JSON strings do not match');
                        }
                        else
                        {
                            done();
                        }
                    });
                    let buf = Buffer.allocUnsafe(0);
                    let extra = 0;
                    it('should encode back to binary', (done) =>
                    {
                        try
                        {
                            buf = Buffer.alloc(packet.getSize());
                            buf = packet.writeToBuffer(buf, 0, DecodeFlags.DontChangeFlags);

                            // Account for appended acks
                            let bl = buf.length;
                            if (packet.packetFlags & PacketFlags.Ack)
                            {
                                // Each appended ack is 4 bytes plus a
                                // 1-byte trailing count.
                                extra += 4 * acksReceived.length;
                                extra++;
                            }
                            bl += extra;

                            if (data.length !== bl)
                            {
                                console.log(buf.toString('hex'));
                                console.log(data.toString('hex'));
                                done('Packet size ' + bl + ' but expected length was ' + data.length + ' sentAcks: ' + acksSent.length + ', receivedAcks: ' + acksReceived.length + ', getSize: ' + packet.getSize());
                            }
                            else
                            {
                                done();
                            }
                        }
                        catch (err)
                        {
                            done(err);
                        }
                    });
                    it('should match the original packet byte-for-byte', (done) =>
                    {
                        // First trim off the extra bytes
                        const trimmedData = data.slice(0, data.length - extra);
                        if (trimmedData.compare(buf) !== 0)
                        {
                            done('Buffers do not match');
                        }
                        else
                        {
                            done();
                        }
                    });
                });
            }
        }
    }
});
|
||||
import 'mocha';
import * as fs from 'fs';
import * as path from 'path';
import { Packet } from '../classes/Packet';
import { DecodeFlags } from '../enums/DecodeFlags';
import { PacketFlags } from '../enums/PacketFlags';

// Returns true when both arrays have the same length and strictly-equal
// elements at every position (order-sensitive).
function compareArrays(arr1: any[], arr2: any[]): boolean
{
    if (arr1.length === arr2.length
        && arr1.every(function(u, i): boolean
        {
            return u === arr2[i];
        })
    )
    {
        return true;
    }
    else
    {
        return false;
    }
}

describe('Packets', () =>
{
    // Round-trips every captured .packet fixture: decode it, compare the
    // result against the expected .json, re-encode, then compare bytes.
    const p = path.resolve(__dirname + '/../../../testing/packets');
    const files = fs.readdirSync(p);
    for (const file of files)
    {
        if (file.substr(file.length - 7) === '.packet')
        {
            const fullPath = p + '/' + file;
            const stats = fs.statSync(fullPath);
            if (!stats.isDirectory())
            {
                describe(file, () =>
                {
                    // Shared state: later tests depend on the decode step
                    // above them having run first (mocha runs in order).
                    let pos = 0;
                    let data: Buffer = Buffer.allocUnsafe(0);
                    const packet: Packet = new Packet();
                    const acksReceived: number[] = [];
                    const acksSent: number[] = [];

                    it('should decode correctly', (done) =>
                    {
                        try
                        {
                            data = fs.readFileSync(fullPath);
                            pos = packet.readFromBuffer(data, 0, (number) =>
                            {
                                acksReceived.push(number);
                            }, (number) =>
                            {
                                acksSent.push(number);
                            });
                            done();
                        }
                        catch (err)
                        {
                            done(err);
                        }
                    });

                    it('should have read the entire packet', (done) =>
                    {
                        if (pos < data.length)
                        {
                            done('Finished reading but we\'re not at the end of the packet (' + pos + ' < ' + data.length + ', seq ' + packet.sequenceNumber + ')');
                        }
                        else
                        {
                            done();
                        }
                    });

                    // Expected decode results live next to the fixture.
                    const jsonFN = fullPath.replace('.packet', '.json');
                    const jsFile = fs.readFileSync(jsonFN);
                    const json = JSON.parse(jsFile.toString('utf8'));

                    it('should have sent the correct number of packet ACKs', (done) =>
                    {
                        if (!compareArrays(json.sentAcks, acksSent))
                        {
                            done('Sent acks does not match expected');
                        }
                        else
                        {
                            done();
                        }
                    });
                    it('should have received the correct number of packet ACKs', (done) =>
                    {
                        if (!compareArrays(json.receivedAcks, acksReceived))
                        {
                            done('Received acks does not match expected');
                        }
                        else
                        {
                            done();
                        }
                    });
                    it('should match our expected decoded data', (done) =>
                    {
                        // Compare via JSON serialization for a deep check.
                        let pckt = json['packet'];
                        pckt = JSON.stringify(pckt);
                        if (JSON.stringify(packet) !== pckt)
                        {
                            done('JSON strings do not match');
                        }
                        else
                        {
                            done();
                        }
                    });
                    let buf = Buffer.allocUnsafe(0);
                    let extra = 0;
                    it('should encode back to binary', (done) =>
                    {
                        try
                        {
                            buf = Buffer.alloc(packet.getSize());
                            buf = packet.writeToBuffer(buf, 0, DecodeFlags.DontChangeFlags);

                            // Account for appended acks
                            let bl = buf.length;
                            if (packet.packetFlags & PacketFlags.Ack)
                            {
                                // Each appended ack is 4 bytes plus a
                                // 1-byte trailing count.
                                extra += 4 * acksReceived.length;
                                extra++;
                            }
                            bl += extra;

                            if (data.length !== bl)
                            {
                                console.log(buf.toString('hex'));
                                console.log(data.toString('hex'));
                                done('Packet size ' + bl + ' but expected length was ' + data.length + ' sentAcks: ' + acksSent.length + ', receivedAcks: ' + acksReceived.length + ', getSize: ' + packet.getSize());
                            }
                            else
                            {
                                done();
                            }
                        }
                        catch (err)
                        {
                            done(err);
                        }
                    });
                    it('should match the original packet byte-for-byte', (done) =>
                    {
                        // First trim off the extra bytes
                        const trimmedData = data.slice(0, data.length - extra);
                        if (trimmedData.compare(buf) !== 0)
                        {
                            done('Buffers do not match');
                        }
                        else
                        {
                            done();
                        }
                    });
                });
            }
        }
    }
});
|
||||
@@ -1,109 +1,109 @@
|
||||
import 'mocha';
import validator from 'validator';
import * as assert from 'assert';
import { UUID } from '../classes/UUID';

describe('UUID', () =>
{
    describe('random', () =>
    {
        it ('should generate a random, valid v4 UUID', () =>
        {
            // Two random UUIDs must both be valid, non-zero, and distinct.
            const uuid = UUID.random().toString();
            const secondUUID = UUID.random().toString();

            if (typeof uuid !== 'string')
            {
                assert.fail('Returned UUID is not a string');
            }
            if (!validator.isUUID(uuid))
            {
                assert.fail('Returned string is not a valid UUID');
            }
            if (uuid === '00000000-0000-0000-0000-000000000000')
            {
                assert.fail('Random UUID should not be zero');
            }
            if (typeof secondUUID !== 'string')
            {
                assert.fail('Returned second UUID is not a string');
            }
            if (!validator.isUUID(secondUUID))
            {
                assert.fail('Second UUID is not a valid UUID');
            }
            if (secondUUID === '00000000-0000-0000-0000-000000000000')
            {
                assert.fail('Random UUID should not be zero');
            }
            if (uuid === secondUUID)
            {
                assert.fail('Two random UUIDs match! (Not random)');
            }
            assert.ok(true);
        });
    });
    describe('zero', () =>
    {
        it ('should generate a zeroed, valid v4 UUID', () =>
        {
            const uuid = UUID.zero().toString();
            if (typeof uuid !== 'string')
            {
                assert.fail('Returned UUID is not a string');
            }
            if (!validator.isUUID(uuid))
            {
                assert.fail('Returned string is not a valid UUID');
            }
            if (uuid !== '00000000-0000-0000-0000-000000000000')
            {
                assert.fail('UUID is not zero')
            }
            assert.ok(true);
        });
    });
    describe('encode/decode', () =>
    {
        it ('should correctly decode a 16-byte UUID from a buffer', () =>
        {
            // The 16 UUID bytes start at offset 2; surrounding bytes are
            // deliberate noise that must be ignored.
            const buf = Buffer.from('00004af668bb6fe34893881408f586c5657c4e1c9910', 'hex');
            const uuid = new UUID(buf, 2);
            const str = uuid.toString();
            if (typeof str !== 'string')
            {
                assert.fail('Returned UUID is not a string');
            }
            if (!validator.isUUID(str))
            {
                assert.fail('Returned string is not a valid UUID');
            }
            if (str !== '4af668bb-6fe3-4893-8814-08f586c5657c')
            {
                assert.fail('UUID decoded incorrectly');
            }
            assert.ok(true);
        });
        it ('should correct encode a UUID into the correct position in a buffer', () =>
        {
            const buf = Buffer.alloc(22);
            const uuid = new UUID('4af668bb-6fe3-4893-8814-08f586c5657c');
            uuid.writeToBuffer(buf, 2);
            // Expect the 16 UUID bytes at offset 2 and zeros elsewhere.
            const bufCmp = Buffer.from('00004af668bb6fe34893881408f586c5657c00000000', 'hex');
            if (buf.compare(bufCmp) !== 0)
            {
                assert.fail('Encoded buffer does not match expected output');
            }
            const result = uuid.toString();
            if (typeof result !== 'string')
            {
                assert.fail('Returned UUID is not a string');
            }
            if (!validator.isUUID(result))
            {
                assert.fail('Returned string is not a valid UUID');
            }
            assert.ok(true);
        });
    });
});
|
||||
import 'mocha';
import validator from 'validator';
import * as assert from 'assert';
import { UUID } from '../classes/UUID';

describe('UUID', () =>
{
    describe('random', () =>
    {
        it ('should generate a random, valid v4 UUID', () =>
        {
            // Two random UUIDs must both be valid, non-zero, and distinct.
            const uuid = UUID.random().toString();
            const secondUUID = UUID.random().toString();

            if (typeof uuid !== 'string')
            {
                assert.fail('Returned UUID is not a string');
            }
            if (!validator.isUUID(uuid))
            {
                assert.fail('Returned string is not a valid UUID');
            }
            if (uuid === '00000000-0000-0000-0000-000000000000')
            {
                assert.fail('Random UUID should not be zero');
            }
            if (typeof secondUUID !== 'string')
            {
                assert.fail('Returned second UUID is not a string');
            }
            if (!validator.isUUID(secondUUID))
            {
                assert.fail('Second UUID is not a valid UUID');
            }
            if (secondUUID === '00000000-0000-0000-0000-000000000000')
            {
                assert.fail('Random UUID should not be zero');
            }
            if (uuid === secondUUID)
            {
                assert.fail('Two random UUIDs match! (Not random)');
            }
            assert.ok(true);
        });
    });
    describe('zero', () =>
    {
        it ('should generate a zeroed, valid v4 UUID', () =>
        {
            const uuid = UUID.zero().toString();
            if (typeof uuid !== 'string')
            {
                assert.fail('Returned UUID is not a string');
            }
            if (!validator.isUUID(uuid))
            {
                assert.fail('Returned string is not a valid UUID');
            }
            if (uuid !== '00000000-0000-0000-0000-000000000000')
            {
                assert.fail('UUID is not zero')
            }
            assert.ok(true);
        });
    });
    describe('encode/decode', () =>
    {
        it ('should correctly decode a 16-byte UUID from a buffer', () =>
        {
            // The 16 UUID bytes start at offset 2; surrounding bytes are
            // deliberate noise that must be ignored.
            const buf = Buffer.from('00004af668bb6fe34893881408f586c5657c4e1c9910', 'hex');
            const uuid = new UUID(buf, 2);
            const str = uuid.toString();
            if (typeof str !== 'string')
            {
                assert.fail('Returned UUID is not a string');
            }
            if (!validator.isUUID(str))
            {
                assert.fail('Returned string is not a valid UUID');
            }
            if (str !== '4af668bb-6fe3-4893-8814-08f586c5657c')
            {
                assert.fail('UUID decoded incorrectly');
            }
            assert.ok(true);
        });
        it ('should correct encode a UUID into the correct position in a buffer', () =>
        {
            const buf = Buffer.alloc(22);
            const uuid = new UUID('4af668bb-6fe3-4893-8814-08f586c5657c');
            uuid.writeToBuffer(buf, 2);
            // Expect the 16 UUID bytes at offset 2 and zeros elsewhere.
            const bufCmp = Buffer.from('00004af668bb6fe34893881408f586c5657c00000000', 'hex');
            if (buf.compare(bufCmp) !== 0)
            {
                assert.fail('Encoded buffer does not match expected output');
            }
            const result = uuid.toString();
            if (typeof result !== 'string')
            {
                assert.fail('Returned UUID is not a string');
            }
            if (!validator.isUUID(result))
            {
                assert.fail('Returned string is not a valid UUID');
            }
            assert.ok(true);
        });
    });
});
|
||||
Reference in New Issue
Block a user