Diffstat (limited to 'libsolidity/codegen/ExpressionCompiler.cpp')
-rw-r--r--  libsolidity/codegen/ExpressionCompiler.cpp  84
1 file changed, 69 insertions(+), 15 deletions(-)
diff --git a/libsolidity/codegen/ExpressionCompiler.cpp b/libsolidity/codegen/ExpressionCompiler.cpp
index f50628ff..9e2d30d5 100644
--- a/libsolidity/codegen/ExpressionCompiler.cpp
+++ b/libsolidity/codegen/ExpressionCompiler.cpp
@@ -139,8 +139,8 @@ void ExpressionCompiler::appendStateVariableAccessor(VariableDeclaration const&
utils().popStackSlots(paramTypes.size() - 1);
}
unsigned retSizeOnStack = 0;
- solAssert(accessorType.returnParameterTypes().size() >= 1, "");
- auto const& returnTypes = accessorType.returnParameterTypes();
+ auto returnTypes = accessorType.returnParameterTypes();
+ solAssert(returnTypes.size() >= 1, "");
if (StructType const* structType = dynamic_cast<StructType const*>(returnType.get()))
{
// remove offset
@@ -518,7 +518,23 @@ bool ExpressionCompiler::visit(FunctionCall const& _functionCall)
arguments[i]->accept(*this);
utils().convertType(*arguments[i]->annotation().type, *function.parameterTypes()[i]);
}
- _functionCall.expression().accept(*this);
+
+ {
+ bool shortcutTaken = false;
+ if (auto identifier = dynamic_cast<Identifier const*>(&_functionCall.expression()))
+ if (auto functionDef = dynamic_cast<FunctionDefinition const*>(identifier->annotation().referencedDeclaration))
+ {
+ // Do not visit the identifier directly; this avoids creating the runtime
+ // function entry label in the creation-time (constructor) context.
+ CompilerContext::LocationSetter locationSetter2(m_context, *identifier);
+ utils().pushCombinedFunctionEntryLabel(m_context.resolveVirtualFunction(*functionDef), false);
+ shortcutTaken = true;
+ }
+
+ if (!shortcutTaken)
+ _functionCall.expression().accept(*this);
+ }
+
unsigned parameterSize = CompilerUtils::sizeOnStack(function.parameterTypes());
if (function.bound())
{
@@ -1359,6 +1375,10 @@ void ExpressionCompiler::endVisit(Identifier const& _identifier)
}
}
else if (FunctionDefinition const* functionDef = dynamic_cast<FunctionDefinition const*>(declaration))
+ // If the identifier is called right away, this code is executed in visit(FunctionCall...), because
+ // we want to avoid having a reference to the runtime function entry point in the
+ // constructor context, since this would force the compiler to include unreferenced
+ internal functions in the runtime context.
utils().pushCombinedFunctionEntryLabel(m_context.resolveVirtualFunction(*functionDef));
else if (auto variable = dynamic_cast<VariableDeclaration const*>(declaration))
appendVariable(*variable, static_cast<Expression const&>(_identifier));
@@ -1618,15 +1638,27 @@ void ExpressionCompiler::appendExternalFunctionCall(
m_context.experimentalFeatureActive(ExperimentalFeature::V050) &&
m_context.evmVersion().hasStaticCall();
+ bool haveReturndatacopy = m_context.evmVersion().supportsReturndata();
unsigned retSize = 0;
+ TypePointers returnTypes;
if (returnSuccessCondition)
retSize = 0; // return value actually is success condition
+ else if (haveReturndatacopy)
+ returnTypes = _functionType.returnParameterTypes();
else
- for (auto const& retType: _functionType.returnParameterTypes())
+ returnTypes = _functionType.returnParameterTypesWithoutDynamicTypes();
+
+ bool dynamicReturnSize = false;
+ for (auto const& retType: returnTypes)
+ if (retType->isDynamicallyEncoded())
{
- solAssert(!retType->isDynamicallySized(), "Unable to return dynamic type from external call.");
- retSize += retType->calldataEncodedSize();
+ solAssert(haveReturndatacopy, "");
+ dynamicReturnSize = true;
+ retSize = 0;
+ break;
}
+ else
+ retSize += retType->calldataEncodedSize();
// Evaluate arguments.
TypePointers argumentTypes;
@@ -1824,20 +1856,42 @@ void ExpressionCompiler::appendExternalFunctionCall(
utils().fetchFreeMemoryPointer();
m_context << Instruction::SUB << Instruction::MLOAD;
}
- else if (!_functionType.returnParameterTypes().empty())
+ else if (!returnTypes.empty())
{
utils().fetchFreeMemoryPointer();
- bool memoryNeeded = false;
- for (auto const& retType: _functionType.returnParameterTypes())
+ // Stack: return_data_start
+
+ // The old decoder did not allocate any memory (i.e. did not touch the free
+ // memory pointer), but kept references to the return data for
+ // (statically-sized) arrays
+ bool needToUpdateFreeMemoryPtr = false;
+ if (dynamicReturnSize || m_context.experimentalFeatureActive(ExperimentalFeature::ABIEncoderV2))
+ needToUpdateFreeMemoryPtr = true;
+ else
+ for (auto const& retType: returnTypes)
+ if (dynamic_cast<ReferenceType const*>(retType.get()))
+ needToUpdateFreeMemoryPtr = true;
+
+ // Stack: return_data_start
+ if (dynamicReturnSize)
{
- utils().loadFromMemoryDynamic(*retType, false, true, true);
- if (dynamic_cast<ReferenceType const*>(retType.get()))
- memoryNeeded = true;
+ solAssert(haveReturndatacopy, "");
+ m_context.appendInlineAssembly("{ returndatacopy(return_data_start, 0, returndatasize()) }", {"return_data_start"});
}
- if (memoryNeeded)
- utils().storeFreeMemoryPointer();
else
- m_context << Instruction::POP;
+ solAssert(retSize > 0, "");
+ // Always use the actual return length, and not our calculated expected length, if returndatacopy is supported.
+ // This ensures that badly formatted return data from external calls can be caught.
+ m_context << (haveReturndatacopy ? eth::AssemblyItem(Instruction::RETURNDATASIZE) : u256(retSize));
+ // Stack: return_data_start return_data_size
+ if (needToUpdateFreeMemoryPtr)
+ m_context.appendInlineAssembly(R"({
+ // round size to the next multiple of 32
+ let newMem := add(start, and(add(size, 0x1f), not(0x1f)))
+ mstore(0x40, newMem)
+ })", {"start", "size"});
+
+ utils().abiDecode(returnTypes, true, true);
}
}
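
The net effect of this change: external calls whose return types are dynamically encoded are copied into memory with RETURNDATACOPY and ABI-decoded instead of being rejected, on EVM targets that support return data (Byzantium and later). A minimal Solidity sketch of the kind of call this code path handles follows; the contract and function names are illustrative only and do not appear in the change itself.

pragma solidity ^0.4.22;

contract Oracle {
    uint[] internal values;

    // Dynamically-sized return type: its encoded size is not known at
    // compile time, so the caller cannot precompute retSize.
    function getValues() external view returns (uint[]) {
        return values;
    }
}

contract Consumer {
    function readFirst(Oracle oracle) public view returns (uint) {
        // With returndatacopy available, the generated call sequence copies the
        // full return data into fresh memory, rounds the free memory pointer up
        // to a 32-byte boundary, and ABI-decodes the array (abiDecode above).
        uint[] memory vs = oracle.getValues();
        return vs.length > 0 ? vs[0] : 0;
    }
}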