Skip to content

Commit

Permalink
Merge branch 'fastmachinelearning:main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
dgburnette authored Dec 11, 2023
2 parents c43c4b5 + 033d438 commit 0c0de41
Show file tree
Hide file tree
Showing 5 changed files with 72 additions and 13 deletions.
12 changes: 7 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
<p float="left">
<p align="center">
<img src="https://github.com/fastmachinelearning/fastmachinelearning.github.io/raw/master/images/hls4ml_logo.svg" alt="hls4ml" width="400"/>
</p>

Expand Down Expand Up @@ -144,7 +144,9 @@ Please use the following text for this acknowledgment:
# Funding
We gratefully acknowledge previous and current support from the U.S. National Science Foundation (NSF) Harnessing the Data Revolution (HDR) Institute for <a href="https://a3d3.ai">Accelerating AI Algorithms for Data Driven Discovery (A3D3)</a> under Cooperative Agreement No. <a href="https://www.nsf.gov/awardsearch/showAward?AWD_ID=2117997">OAC-2117997</a>, U.S. Department of Energy (DOE) Office of Science, Office of Advanced Scientific Computing Research under the Real‐time Data Reduction Codesign at the Extreme Edge for Science (XDR) Project (<a href="https://science.osti.gov/-/media/grants/pdf/foas/2021/SC_FOA_0002501.pdf">DE-FOA-0002501</a>), DOE Office of Science, Office of High Energy Physics Early Career Research Program (<a href="https://pamspublic.science.energy.gov/WebPAMSExternal/Interface/Common/ViewPublicAbstract.aspx?rv=df0ae4ab-a46e-481a-9acc-3856b6b041e5&rtc=24&PRoleId=10">DE-SC0021187</a>, DE-0000247070), and the European Research Council (ERC) under the European Union's Horizon 2020 research and innovation program (Grant No. <a href="https://doi.org/10.3030/772369">772369</a>).

<img src="https://github.com/fastmachinelearning/hls4ml/assets/4932543/d4b6e2a3-3537-4413-9809-8153a7d624d6" alt="A3D3" height="200"/>
<img src="https://github.com/fastmachinelearning/hls4ml/assets/4932543/16e77374-9829-40a8-800e-8d12018a7cb3" alt="NSF" height="200"/>
<img src="https://github.com/fastmachinelearning/hls4ml/assets/4932543/de6ca6ea-4d1c-4c56-9d93-f759914bbbf9" alt="DOE" height="200"/>
<img src="https://github.com/fastmachinelearning/hls4ml/assets/4932543/7a369971-a381-4bb8-932a-7162b173cbac" alt="ERC" height="200"/>
<p align="center">
<img src="https://github.com/fastmachinelearning/hls4ml/assets/29201053/bd1217d4-9930-47b7-8917-ad3fc430c75d" alt="A3D3" width="130"/>
<img src="https://github.com/fastmachinelearning/hls4ml/assets/4932543/16e77374-9829-40a8-800e-8d12018a7cb3" alt="NSF" width="130"/>
<img src="https://github.com/fastmachinelearning/hls4ml/assets/4932543/de6ca6ea-4d1c-4c56-9d93-f759914bbbf9" alt="DOE" width="130"/>
<img src="https://github.com/fastmachinelearning/hls4ml/assets/4932543/7a369971-a381-4bb8-932a-7162b173cbac" alt="ERC" width="130"/>
</p>
2 changes: 2 additions & 0 deletions hls4ml/backends/fpga/passes/repack_stream.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,8 @@ def transform(self, model, node):

# Insert new Repack node instead of Reshape
repack_layer = model.make_node(Repack, 'repack_' + node.name, attrs, node.inputs.copy())
# As result_t attribute is not honored by type conversion, set it manually here
repack_layer.attributes[repack_layer.name].type = node.attributes[node.name].type
model.replace_node(node, repack_layer)

return True
11 changes: 3 additions & 8 deletions hls4ml/model/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -559,14 +559,9 @@ def update_precision(self, new_precision):
if isinstance(new_precision, (IntegerPrecisionType, XnorPrecisionType, ExponentPrecisionType)):
self.precision_fmt = '{:.0f}'
elif isinstance(new_precision, FixedPrecisionType):
if new_precision.fractional > 0:
# Use str to represent the float with digits, get the length
# to right of decimal point
lsb = 2**-new_precision.fractional
decimal_spaces = len(str(lsb).split('.')[1])
self.precision_fmt = f'{{:.{decimal_spaces}f}}'
else:
self.precision_fmt = '{:.0f}'
decimal_spaces = max(0, new_precision.fractional)
self.precision_fmt = f'{{:.{decimal_spaces}f}}'

else:
raise RuntimeError(f"Unexpected new precision type: {new_precision}")

Expand Down
27 changes: 27 additions & 0 deletions test/pytest/test_repack_precision.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
from tensorflow import keras

from hls4ml.converters import convert_from_keras_model


def test_repack_precision():
    """Check that a Repack layer created for a Reshape keeps the Reshape's result precision."""
    keras_in = keras.Input(shape=(3, 3), name='inp')
    x = keras.layers.Reshape((3, 3), name='reshape')(keras_in)
    x = keras.layers.Conv1D(2, 2, name='conv')(x)
    keras_model = keras.Model(keras_in, x)

    # Give every layer the same explicit precision, distinct from the model default,
    # so an un-propagated result_t on the repack node is detectable.
    per_layer = {name: {'Precision': 'fixed<20,10>'} for name in ('inp', 'reshape', 'conv')}
    config = {'Model': {'Precision': 'fixed<2,1>', 'ReuseFactor': 1}, 'LayerName': per_layer}

    # Repack only happens in io_stream
    hls_model = convert_from_keras_model(keras_model, hls_config=config, io_type='io_stream')
    assert 'repack_reshape' in hls_model.graph, 'repack_reshape not found in graph'
    precision = hls_model.graph['repack_reshape'].attributes['result_t'].precision
    for field, expected in (('integer', 10), ('fractional', 10), ('width', 20)):
        assert getattr(precision, field) == expected, 'Precision mismatch'
    assert precision.signed is True, 'Precision mismatch'
33 changes: 33 additions & 0 deletions test/pytest/test_weight_writer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
from glob import glob
from pathlib import Path

import keras
import numpy as np
import pytest

import hls4ml

# Root directory for generated HLS projects: keep artifacts next to the test file.
# (A leftover debug override pointing at /tmp/trash was removed here.)
test_root_path = Path(__file__).parent


@pytest.mark.parametrize('k', [0, 1])
@pytest.mark.parametrize('i', [4, 8, 10])
@pytest.mark.parametrize('f', [-2, 0, 2, 7, 14])
def test_weight_writer(k, i, f):
    """Check that a weight round-trips exactly through the written weight file.

    k: signedness flag (1 = signed), i: integer bits, f: fractional bits.
    The single Dense weight is set to 2.0**-f, the smallest step the requested
    fixed-point type can represent, so any rounding or formatting loss in the
    weight writer shows up as an exact-equality failure.
    """
    # b = total width, i = integer width (both grow by one bit when signed).
    k, b, i = k, k + i + f, k + i
    w = np.array([[np.float32(2.0**-f)]])
    u = '' if k else 'u'
    dtype = f'{u}fixed<{b}, {i}>'
    hls_config = {'LayerName': {'dense': {'Precision': {'weight': dtype}}}}
    model = keras.Sequential([keras.layers.Dense(1, input_shape=(1,), name='dense')])
    model.layers[0].kernel.assign(keras.backend.constant(w))
    output_dir = str(test_root_path / f'hls4ml_prj_test_weight_writer_{dtype}')
    model_hls = hls4ml.converters.convert_from_keras_model(model, hls_config=hls_config, output_dir=output_dir)
    model_hls.write()
    w_paths = glob(str(Path(output_dir) / 'firmware/weights/w*.txt'))
    # Assert before indexing: an empty glob should fail the assertion,
    # not raise IndexError (previously a debug print indexed w_paths[0] first).
    assert len(w_paths) == 1
    w_loaded = np.loadtxt(w_paths[0], delimiter=',').reshape(1, 1)
    assert np.all(w == w_loaded)

0 comments on commit 0c0de41

Please sign in to comment.