依据SGET协会OSM标准,首创具有662个引脚的OSM模组——凌华智能引领嵌入式运算市场

在可焊接的45 x 45mm尺寸上提升功率

开启嵌入式运算发展的新时代

摘要:

1.开放式标准模块(OSM™),最大尺寸仅45 x 45mm,采用零开销的模块化系统简化生产,并提供662个引脚以增强小型化和物联网应用。

2.凌华智能提供基于NXP i.MX 93的开放式标准模块OSM-IMX93和基于NXP i.MX 8M Plus的开放式标准模块OSM-IMX8MP,首创开放式标准模块模组(OSM模组),实现在嵌入式运算领域的持续突破。

中国上海– 2024年7月2日

行走在全球边缘运算的前端,凌华智能依据SGET协会OSM标准首创OSM模组,并通过这一小巧而强大的模组为嵌入式运算领域带来了新的技术变革。作为SGET协会委员,凌华智能在确立该标准行业地位的过程中发挥了关键作用。OSM模块在尺寸缩小、开放标准和可焊接BGA微型模块上实现新突破,无缝适配Arm和x86设计,同时其外形尺寸明显比市面上的嵌入式模组化电脑小。

OSM模组重新定义了尺寸效率,其最大尺寸为45 x 45mm,比Qseven(70x70mm)小28%,比SMARC(82x50mm)小51%。尽管其体积小巧,OSM模组具有662个引脚,远多于SMARC的314个和Qseven的230个。

BGA设计使得在小面积上实现更多接口成为可能,在小型化和满足日益复杂需求中具有重要作用。对于日益增多的物联网应用而言,该标准有助于结合模组化嵌入式运算的优势,同时满足成本、占用面积和界面日益增加的要求。

此外,OSM模组的功率范围通常在15瓦以下,并采用焊接固定的方案,能够承受极端振动,非常适合对设计有要求且能够承受恶劣环境条件的应用。

凌华智能以OSM产品线为市场引入变革性技术,展现了OSM-IMX93和OSM-IMX8MP模组的优越性,但这仅仅是发展嵌入式运算的开始。

凌华智能高级产品经理Henri Parmentier提到:“作为建立OSM模组的开拓者,我们致力于提供更多创新解决方案,帮助客户发掘新的可能性。OSM模组为嵌入式模组化电脑的持续创新奠定了基础,我们对于能处在每一个边缘运算解决方案的前沿感到很自豪。”

凌华智能还将提供OSM开发工具套件,包括支持全方位综合接口的OSM模块和参考载板,用于现场原型设计和参考。

下面是一个Java读写.hdf5多数据集,且数据集下面有多个Attributes的例子: ```java import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; import ncsa.hdf.hdf5lib.H5; import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; public class HDF5Example { public static void main(String[] args) { String filename = "example.h5"; // Create a new HDF5 file createHDF5File(filename); // Write data to multiple datasets with attributes writeDataToHDF5File(filename); // Read data from multiple datasets with attributes readDataFromHDF5File(filename); } public static void createHDF5File(String filename) { try { // Create a new HDF5 file int file_id = H5.H5Fcreate(filename, H5.H5F_ACC_TRUNC, H5.H5P_DEFAULT, H5.H5P_DEFAULT); H5.H5Fclose(file_id); System.out.println("Created HDF5 file: " + filename); } catch (HDF5Exception e) { System.out.println("Error creating HDF5 file: " + e.getMessage()); } } public static void writeDataToHDF5File(String filename) { try { // Open the HDF5 file for writing int file_id = H5.H5Fopen(filename, H5.H5F_ACC_RDWR, H5.H5P_DEFAULT); // Create a group for the datasets int group_id = H5.H5Gcreate(file_id, "/data", H5.H5P_DEFAULT, H5.H5P_DEFAULT, H5.H5P_DEFAULT); // Write data to the first dataset int[] data1 = {1, 2, 3, 4, 5}; long[] dims1 = {5}; int dataspace_id1 = H5.H5Screate_simple(1, dims1, null); int dataset_id1 = H5.H5Dcreate(group_id, "dataset1", H5.H5T_STD_I32LE, dataspace_id1, H5.H5P_DEFAULT, H5.H5P_DEFAULT, H5.H5P_DEFAULT); H5.H5Dwrite(dataset_id1, H5.H5T_NATIVE_INT, H5.H5S_ALL, H5.H5S_ALL, H5.H5P_DEFAULT, data1); // Add attributes to the first dataset int attribute_id1 = H5.H5Acreate(dataset_id1, "attribute1", H5.H5T_STD_I32LE, dataspace_id1, H5.H5P_DEFAULT, H5.H5P_DEFAULT); int[] attribute_data1 = {1}; H5.H5Awrite(attribute_id1, H5.H5T_NATIVE_INT, attribute_data1); int attribute_id2 = H5.H5Acreate(dataset_id1, "attribute2", H5.H5T_STD_I32LE, dataspace_id1, H5.H5P_DEFAULT, H5.H5P_DEFAULT); int[] attribute_data2 = {2}; 
H5.H5Awrite(attribute_id2, H5.H5T_NATIVE_INT, attribute_data2); // Close the first dataset H5.H5Aclose(attribute_id1); H5.H5Aclose(attribute_id2); H5.H5Dclose(dataset_id1); // Write data to the second dataset float[] data2 = {1.1f, 2.2f, 3.3f, 4.4f, 5.5f}; long[] dims2 = {5}; int dataspace_id2 = H5.H5Screate_simple(1, dims2, null); int dataset_id2 = H5.H5Dcreate(group_id, "dataset2", H5.H5T_IEEE_F32LE, dataspace_id2, H5.H5P_DEFAULT, H5.H5P_DEFAULT, H5.H5P_DEFAULT); H5.H5Dwrite(dataset_id2, H5.H5T_NATIVE_FLOAT, H5.H5S_ALL, H5.H5S_ALL, H5.H5P_DEFAULT, data2); // Add attributes to the second dataset int attribute_id3 = H5.H5Acreate(dataset_id2, "attribute3", H5.H5T_IEEE_F32LE, dataspace_id2, H5.H5P_DEFAULT, H5.H5P_DEFAULT); float[] attribute_data3 = {1.1f}; H5.H5Awrite(attribute_id3, H5.H5T_NATIVE_FLOAT, attribute_data3); int attribute_id4 = H5.H5Acreate(dataset_id2, "attribute4", H5.H5T_IEEE_F32LE, dataspace_id2, H5.H5P_DEFAULT, H5.H5P_DEFAULT); float[] attribute_data4 = {2.2f}; H5.H5Awrite(attribute_id4, H5.H5T_NATIVE_FLOAT, attribute_data4); // Close the second dataset H5.H5Aclose(attribute_id3); H5.H5Aclose(attribute_id4); H5.H5Dclose(dataset_id2); // Close the group H5.H5Gclose(group_id); // Close the HDF5 file H5.H5Fclose(file_id); System.out.println("Wrote data to HDF5 file: " + filename); } catch (HDF5Exception e) { System.out.println("Error writing data to HDF5 file: " + e.getMessage()); } } public static void readDataFromHDF5File(String filename) { try { // Open the HDF5 file for reading int file_id = H5.H5Fopen(filename, H5.H5F_ACC_RDONLY, H5.H5P_DEFAULT); // Open the group for the datasets int group_id = H5.H5Gopen(file_id, "/data", H5.H5P_DEFAULT); // Read data from the first dataset int dataset_id1 = H5.H5Dopen(group_id, "dataset1", H5.H5P_DEFAULT); int dataspace_id1 = H5.H5Dget_space(dataset_id1); long[] dims1 = new long[1]; H5.H5Sget_simple_extent_dims(dataspace_id1, dims1, null); int[] data1 = new int[(int)dims1[0]]; H5.H5Dread(dataset_id1, 
H5.H5T_NATIVE_INT, H5.H5S_ALL, H5.H5S_ALL, H5.H5P_DEFAULT, data1); // Read attributes from the first dataset int attribute_id1 = H5.H5Aopen(dataset_id1, "attribute1", H5.H5P_DEFAULT); int[] attribute_data1 = new int[1]; H5.H5Aread(attribute_id1, H5.H5T_NATIVE_INT, attribute_data1); int attribute_id2 = H5.H5Aopen(dataset_id1, "attribute2", H5.H5P_DEFAULT); int[] attribute_data2 = new int[1]; H5.H5Aread(attribute_id2, H5.H5T_NATIVE_INT, attribute_data2); // Close the first dataset and attributes H5.H5Aclose(attribute_id1); H5.H5Aclose(attribute_id2); H5.H5Dclose(dataset_id1); // Print the first dataset and attributes System.out.println("Data from dataset1: " + Arrays.toString(data1)); System.out.println("Attribute1 from dataset1: " + attribute_data1[0]); System.out.println("Attribute2 from dataset1: " + attribute_data2[0]); // Read data from the second dataset int dataset_id2 = H5.H5Dopen(group_id, "dataset2", H5.H5P_DEFAULT); int dataspace_id2 = H5.H5Dget_space(dataset_id2); long[] dims2 = new long[1]; H5.H5Sget_simple_extent_dims(dataspace_id2, dims2, null); float[] data2 = new float[(int)dims2[0]]; H5.H5Dread(dataset_id2, H5.H5T_NATIVE_FLOAT, H5.H5S_ALL, H5.H5S_ALL, H5.H5P_DEFAULT, data2); // Read attributes from the second dataset int attribute_id3 = H5.H5Aopen(dataset_id2, "attribute3", H5.H5P_DEFAULT); float[] attribute_data3 = new float[1]; H5.H5Aread(attribute_id3, H5.H5T_NATIVE_FLOAT, attribute_data3); int attribute_id4 = H5.H5Aopen(dataset_id2, "attribute4", H5.H5P_DEFAULT); float[] attribute_data4 = new float[1]; H5.H5Aread(attribute_id4, H5.H5T_NATIVE_FLOAT, attribute_data4); // Close the second dataset and attributes H5.H5Aclose(attribute_id3); H5.H5Aclose(attribute_id4); H5.H5Dclose(dataset_id2); // Print the second dataset and attributes System.out.println("Data from dataset2: " + Arrays.toString(data2)); System.out.println("Attribute3 from dataset2: " + attribute_data3[0]); System.out.println("Attribute4 from dataset2: " + attribute_data4[0]); // 
Close the group H5.H5Gclose(group_id); // Close the HDF5 file H5.H5Fclose(file_id); System.out.println("Read data from HDF5 file: " + filename); } catch (HDF5Exception e) { System.out.println("Error reading data from HDF5 file: " + e.getMessage()); } } } ``` 这个例子创建了一个名为“example.h5”的新HDF5文件,然后向其中写入两个数据集“dataset1”和“dataset2”,每个数据集下都有两个属性。然后,它从文件中读取数据集和属性,并将它们打印到控制台上。
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值