Skip to content

Commit

Permalink
Handle forEach
Browse files Browse the repository at this point in the history
  • Loading branch information
Pokechu22 committed Feb 26, 2021
1 parent fe54914 commit fd0f3a5
Showing 1 changed file with 27 additions and 0 deletions.
27 changes: 27 additions & 0 deletions burger/toppings/packetinstructions.py
Original file line number Diff line number Diff line change
Expand Up @@ -503,6 +503,11 @@ def _handle_invoke(classloader, classes, instruction, verbose,
assert isinstance(obj, StackOperand)
obj.value += "(" + _PIT.join(arguments) + ")";
return []
elif name == "forEach":
assert num_arguments == 1
assert not is_static
return _PIT._handle_foreach(classloader, classes, instruction, verbose,
cls, name, desc, obj, arguments[0])
else:
if desc.returns.name != "void":
# Assume that any function that returns something does not write
Expand Down Expand Up @@ -708,6 +713,28 @@ def _handle_3_arg_buffer_call(classloader, classes, instruction, verbose,
else:
raise Exception("Unexpected descriptor " + desc)

@staticmethod
def _handle_foreach(classloader, classes, instruction, verbose,
                    cls, name, desc, instance, consumer):
    """Desugar a Consumer-based forEach call into an explicit iterator loop.

    Produces the Operation sequence equivalent to:

        Iterator it = instance.iterator();
        while (it.hasNext()) {
            itv = it.next();
            <inlined consumer body applied to itv>
        }
    """
    assert isinstance(instance, StackOperand)
    assert isinstance(consumer, InvokeDynamicInfo)
    # The single argument to forEach must be a java.util.function.Consumer.
    assert "Consumer" in desc.args[0].name

    pos = instruction.pos
    # Element type comes from the consumer lambda's last parameter,
    # converted from internal (slash) form to dotted form.
    element_type = consumer.method_desc.args[-1].name.replace("/", ".")

    ops = [
        Operation(pos, "store", type="Iterator", var="it",
                  value=instance.value + ".iterator()"),
        Operation(pos, "loop", condition="it.hasNext()"),
        Operation(pos, "store", type=element_type,
                  var="itv", value="it.next()"),
    ]
    # Inline the lambda body, binding its parameter to the loop variable.
    ops.extend(_PIT._lambda_operations(
        classloader, classes, instruction, verbose, consumer, ["itv"]
    ))
    # See comment in _handle_1_arg_buffer_call
    ops.append(Operation(pos + 1 - SUB_INS_EPSILON, "endloop"))
    return ops

@staticmethod
def join(arguments, separator=", "):
"""Converts a list of object into a comma separated list"""
Expand Down

0 comments on commit fd0f3a5

Please sign in to comment.