Thanks for visiting codestin.com
Credit goes to github.com

Skip to content
Open
3 changes: 2 additions & 1 deletion src/hdf5_hl.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,8 @@ declare type TypedArray = Int8Array | Uint8Array | Uint8ClampedArray | Int16Arra
* `[i0, i1]` - select all data in the range `i0` to `i1`
* `[i0, i1, s]` - select every `s` values in the range `i0` to `i1`
**/
declare type Slice = [] | [number | null] | [number | null, number | null] | [number | null, number | null, number | null];
declare type SliceElement = number | null;
declare type Slice = [] | [SliceElement] | [SliceElement, SliceElement] | [SliceElement, SliceElement, SliceElement];
export declare type GuessableDataTypes = TypedArray | number | number[] | string | string[] | Reference | Reference[] | RegionReference | RegionReference[];
declare enum OBJECT_TYPE {
DATASET = "Dataset",
Expand Down
61 changes: 42 additions & 19 deletions src/hdf5_hl.ts
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,19 @@ function dirname(path: string) {
return head;
}

function check_malloc(nbytes: bigint | number) {
const max_memory = Module.MAXIMUM_MEMORY;
if (nbytes > max_memory) {
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I would convert to safe_nbytes before checking, because > will throw if the two operands aren't both number. Though is this ever called with a bigint?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Comparison operators do an automatic conversion (unlike arithmetic operators), but you're right that it's probably only called with number type

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ah yes, sorry!

throw new Error(`Requested allocation of ${nbytes} bytes exceeds maximum memory of ${max_memory} bytes`);
}
const safe_nbytes = Number(nbytes);
const ptr = Module._malloc(safe_nbytes);
if (ptr === 0) {
throw new Error(`Memory allocation of ${safe_nbytes} bytes failed`);
}
return ptr;
}

function get_attr(file_id: bigint, obj_name: string, attr_name: string, json_compatible: true): JSONCompatibleOutputData | null;
function get_attr(file_id: bigint, obj_name: string, attr_name: string, json_compatible: false): OutputData | null;
function get_attr(file_id: bigint, obj_name: string, attr_name: string, json_compatible: boolean): OutputData | JSONCompatibleOutputData | null;
Expand All @@ -49,8 +62,8 @@ function get_attr(file_id: bigint, obj_name: string, attr_name: string, json_com
return null;
}

let nbytes = metadata.size * metadata.total_size;
let data_ptr = Module._malloc(nbytes);
const nbytes = metadata.size * metadata.total_size;
let data_ptr = check_malloc(nbytes);
var processed;
try {
Module.get_attribute_data(file_id, obj_name, attr_name, BigInt(data_ptr));
Expand Down Expand Up @@ -146,7 +159,7 @@ function process_data(data: Uint8Array, metadata: Metadata, json_compatible: boo
else if (type === Module.H5T_class_t.H5T_INTEGER.value || type === Module.H5T_class_t.H5T_FLOAT.value) {
const { size, signed} = metadata;
const accessor = getAccessor(type, size, signed);
output_data = new accessor(data.buffer);
output_data = new accessor(data.buffer as ArrayBuffer);
if (json_compatible) {
output_data = [...output_data];
if (accessor === BigInt64Array || accessor === BigUint64Array) {
Expand Down Expand Up @@ -461,7 +474,8 @@ const TypedArray_to_dtype = new Map([
* `[i0, i1]` - select all data in the range `i0` to `i1`
* `[i0, i1, s]` - select every `s` values in the range `i0` to `i1`
**/
type Slice = [] | [number|null] | [number|null,number|null] | [number|null, number|null, number|null];
type SliceElement = number | null;
type Slice = [] | [SliceElement] | [SliceElement, SliceElement] | [SliceElement, SliceElement, SliceElement];

export type GuessableDataTypes = TypedArray | number | number[] | string | string[] | Reference | Reference[] | RegionReference | RegionReference[];

Expand Down Expand Up @@ -573,10 +587,10 @@ export class Attribute {
to_array(): JSONCompatibleOutputData | null {
const { json_value, metadata } = this;
const { shape } = metadata;
if (!isIterable(json_value) || typeof json_value === "string") {
if (!isIterable(json_value) || typeof json_value === "string" || shape === null) {
return json_value;
}
return create_nested_array(json_value, <number[]>shape);
return create_nested_array(json_value, shape);
}
}

Expand Down Expand Up @@ -635,7 +649,7 @@ abstract class HasAttrs {
);
}
else {
let data_ptr = Module._malloc((prepared_data as Uint8Array).byteLength);
let data_ptr = check_malloc((prepared_data as Uint8Array).byteLength);
try {
Module.HEAPU8.set(prepared_data as Uint8Array, data_ptr);
Module.create_attribute(
Expand Down Expand Up @@ -830,7 +844,7 @@ export class Group extends HasAttrs {
);
}
else {
let data_ptr = Module._malloc((prepared_data as Uint8Array).byteLength);
let data_ptr = check_malloc((prepared_data as Uint8Array).byteLength);
try {
Module.HEAPU8.set(prepared_data as Uint8Array, data_ptr);
Module.create_dataset(
Expand Down Expand Up @@ -904,14 +918,23 @@ export class File extends Group {
}
}

const calculateHyperslabParams = (shape: number[],ranges: Slice[]) => {
const calculateHyperslabParams = (shape: number[], ranges: Slice[]) => {
const strides = shape.map((s, i) => BigInt(ranges?.[i]?.[2] ?? 1));
const count = shape.map((s, i) => {
const N = BigInt((Math.min(s, ranges?.[i]?.[1] ?? s) - Math.max(0, ranges?.[i]?.[0] ?? 0)));
const range_upper = ranges?.[i]?.[1] ?? s;
const range_lower = ranges?.[i]?.[0] ?? 0;
const high = (range_upper < s) ? range_upper : s;
const low = (range_lower > 0) ? range_lower : 0;
const N = BigInt(high - low);
const st = strides[i];
return N / st + ((N % st) + st - 1n)/st
return BigInt(N / st + ((N % st) + st - 1n)/st);
});
const offset = shape.map((s, i) => {
const range_lower = ranges?.[i]?.[0] ?? 0;
const low = (range_lower > 0) ? range_lower : 0;
return BigInt((s < low) ? s : low);
});
const offset = shape.map((s, i) => BigInt(Math.min(s, Math.max(0, ranges?.[i]?.[0] ?? 0))));
// return BigInt arrays, to match inputs of Module functions
return {strides, count, offset}
}

Expand Down Expand Up @@ -972,7 +995,7 @@ export class Dataset extends HasAttrs {
const {strides, count, offset} = calculateHyperslabParams(shape, ranges);
const total_size = count.reduce((previous, current) => current * previous, 1n);
const nbytes = metadata.size * Number(total_size);
const data_ptr = Module._malloc(nbytes);
const data_ptr = check_malloc(nbytes);
let processed: OutputData;
try {
Module.get_dataset_data(this.file_id, this.path, count, offset, strides, BigInt(data_ptr));
Expand Down Expand Up @@ -1000,8 +1023,8 @@ export class Dataset extends HasAttrs {
// if auto_refresh is on, getting the metadata has triggered a refresh of the dataset_id;
const {strides, count, offset} = calculateHyperslabParams(shape, ranges);

const { data: prepared_data, shape: guessed_shape } = prepare_data(data, metadata, count);
let data_ptr = Module._malloc((prepared_data as Uint8Array).byteLength);
const { data: prepared_data } = prepare_data(data, metadata, count);
let data_ptr = check_malloc((prepared_data as Uint8Array).byteLength);
Module.HEAPU8.set(prepared_data as Uint8Array, data_ptr);

try {
Expand All @@ -1028,10 +1051,10 @@ export class Dataset extends HasAttrs {
to_array(): JSONCompatibleOutputData | null {
const { json_value, metadata } = this;
const { shape } = metadata;
if (!isIterable(json_value) || typeof json_value === "string") {
if (!isIterable(json_value) || typeof json_value === "string" || shape === null) {
return json_value;
}
let nested = create_nested_array(json_value, <number[]>shape);
let nested = create_nested_array(json_value, shape);
return nested;
}

Expand Down Expand Up @@ -1091,7 +1114,7 @@ export class Dataset extends HasAttrs {

// if auto_refresh is on, getting the metadata has triggered a refresh of the dataset_id;
let nbytes = metadata.size * metadata.total_size;
let data_ptr = Module._malloc(nbytes);
let data_ptr = check_malloc(nbytes);
let processed: OutputData;
try {
Module.get_dataset_data(this.file_id, this.path, null, null, null, BigInt(data_ptr));
Expand Down Expand Up @@ -1140,7 +1163,7 @@ export class DatasetRegion {

// if auto_refresh is on, getting the metadata has triggered a refresh of the dataset_id;
let nbytes = metadata.size * metadata.total_size;
let data_ptr = Module._malloc(nbytes);
let data_ptr = check_malloc(nbytes);
let processed: OutputData;
try {
Module.get_region_data(this.source_dataset.file_id, this.region_reference.ref_data, BigInt(data_ptr));
Expand Down
28 changes: 15 additions & 13 deletions src/hdf5_util.cc
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
#include "H5PLextern.h"
#include <emscripten/bind.h>
#include <emscripten.h>
#include <emscripten/heap.h>

#define ATTRIBUTE_DATA 0
#define DATASET_DATA 1
Expand Down Expand Up @@ -329,13 +330,13 @@ val get_dtype_metadata(hid_t dtype)
int ndims = H5Tget_array_ndims(dtype);
std::vector<hsize_t> array_dims(ndims);
H5Tget_array_dims2(dtype, &array_dims[0]);
int total_size = 1;
hsize_t total_size = 1;
for (int i=0; i<ndims; i++) {
array_dims_out.set(i, (int)array_dims[i]);
total_size *= (int)array_dims[i];
array_dims_out.set(i, (double)array_dims[i]);
total_size *= array_dims[i];
}
array_type.set("shape", array_dims_out);
array_type.set("total_size", total_size);
array_type.set("total_size", (double)total_size);
attr.set("array_type", array_type);
}
else if (dtype_class == H5T_VLEN) {
Expand Down Expand Up @@ -402,8 +403,8 @@ val get_abstractDS_metadata(hid_t dspace, hid_t dtype, hid_t dcpl)
val attr = get_dtype_metadata(dtype);

int type = H5Sget_simple_extent_type(dspace);
int total_size = H5Sget_simple_extent_npoints(dspace);
attr.set("total_size", total_size);
hsize_t total_size = H5Sget_simple_extent_npoints(dspace);
attr.set("total_size", (double)total_size); // set as Number, not BigInt

if (type == H5S_NULL) {
attr.set("shape", val::null());
Expand All @@ -421,8 +422,8 @@ val get_abstractDS_metadata(hid_t dspace, hid_t dtype, hid_t dcpl)
val shape = val::array();
val maxshape = val::array();
for (int d = 0; d < ndims; d++) {
shape.set(d, (uint)dims_out.at(d));
maxshape.set(d, (uint)maxdims_out.at(d));
shape.set(d, (double)dims_out.at(d));
maxshape.set(d, (double)maxdims_out.at(d));
}

attr.set("shape", shape);
Expand Down Expand Up @@ -1224,8 +1225,8 @@ val get_region_metadata(hid_t loc_id, const val ref_data_in)
// fill in shape, maxshape, chunks, total_size
val metadata = get_abstractDS_metadata(dspace, dtype, dcpl);
// then override the ones that are specific to a region:
int total_size = H5Sget_select_npoints(dspace);
metadata.set("total_size", total_size);
hsize_t total_size = H5Sget_select_npoints(dspace);
metadata.set("total_size", (double)total_size);

int rank = H5Sget_simple_extent_ndims(dspace);
// shape will be null if the selection is not a regular hyperslab
Expand All @@ -1236,11 +1237,11 @@ val get_region_metadata(hid_t loc_id, const val ref_data_in)
std::vector<hsize_t> count(rank);
std::vector<hsize_t> block(rank);
htri_t success = H5Sget_regular_hyperslab(dspace, nullptr, nullptr, count.data(), block.data());
shape = val::array();
shape = val::array(); // elements of type hsize_t
for (int d = 0; d < rank; d++)
{
int blocksize = (block.at(d) == NULL) ? 1 : block.at(d);
shape.set(d, (uint)(count.at(d) * blocksize));
hsize_t blocksize = (block.at(d) == NULL) ? 1 : block.at(d);
shape.set(d, (double)(count.at(d) * blocksize));
}
}
metadata.set("shape", shape);
Expand Down Expand Up @@ -1442,6 +1443,7 @@ EMSCRIPTEN_BINDINGS(hdf5)
constant("H5Z_FILTER_SCALEOFFSET", H5Z_FILTER_SCALEOFFSET);
constant("H5Z_FILTER_RESERVED", H5Z_FILTER_RESERVED);
constant("H5Z_FILTER_MAX", H5Z_FILTER_MAX);
constant("MAXIMUM_MEMORY", emscripten_get_heap_max());

register_vector<std::string>("vector<string>");
}
Expand Down
1 change: 1 addition & 0 deletions src/hdf5_util_helpers.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@ export interface H5Module extends EmscriptenModule {
H5Z_FILTER_SCALEOFFSET: 6;
H5Z_FILTER_RESERVED: 256;
H5Z_FILTER_MAX: 65535;
MAXIMUM_MEMORY: number;
create_group(file_id: bigint, name: string, track_order?: boolean): number;
create_vlen_str_dataset(file_id: bigint, dset_name: string, prepared_data: any, shape: bigint[], maxshape: (bigint | null)[], chunks: bigint[] | null, type: number, size: number, signed: boolean, vlen: boolean, track_order?: boolean): number;
get_dataset_data(file_id: bigint, path: string, count: bigint[] | null, offset: bigint[] | null, strides: bigint[] | null, rdata_ptr: bigint): number;
Expand Down
Loading