gccrs: Add HIR to GCC GENERIC lowering entry point

This patch contains the entry point and utilities used for the lowering
of HIR nodes to `tree`s. It also contains a constant evaluator, ported
over from the C++ frontend.
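At a high level, the new entry point is driven as follows (a minimal sketch; `backend` and `hir_crate` stand in for the front end's existing ::Backend instance and type-checked HIR crate, and the real call site is not part of this patch):

	// sketch only: lower a whole crate from HIR to GENERIC
	Rust::Compile::Context ctx (backend);
	Rust::Compile::CompileCrate::Compile (hir_crate, &ctx);
	ctx.write_to_backend (); // hand collected type/const/function/var decls to the middle-end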

gcc/rust/ChangeLog:
	* backend/rust-compile-context.cc: New.
	* backend/rust-compile-context.h: New.
	* backend/rust-compile.cc: New.
	* backend/rust-compile.h: New.
	* backend/rust-constexpr.cc: New.
	* backend/rust-constexpr.h: New.

Co-authored-by: David Faust <david.faust@oracle.com>
Co-authored-by: Faisal Abbas <90.abbasfaisal@gmail.com>
Signed-off-by: Faisal Abbas <90.abbasfaisal@gmail.com>
commit cfbda2f78b (parent 019b2f1558)
Author: Philip Herron, 2022-10-21 14:29:50 +02:00; committed by Arthur Cohen
6 changed files with 1414 additions and 0 deletions

gcc/rust/backend/rust-compile-context.cc
@@ -0,0 +1,146 @@
// Copyright (C) 2020-2022 Free Software Foundation, Inc.
// This file is part of GCC.
// GCC is free software; you can redistribute it and/or modify it under
// the terms of the GNU General Public License as published by the Free
// Software Foundation; either version 3, or (at your option) any later
// version.
// GCC is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
// for more details.
// You should have received a copy of the GNU General Public License
// along with GCC; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
#include "rust-compile-context.h"
#include "rust-compile-type.h"
namespace Rust {
namespace Compile {
Context::Context (::Backend *backend)
: backend (backend), resolver (Resolver::Resolver::get ()),
tyctx (Resolver::TypeCheckContext::get ()),
mappings (Analysis::Mappings::get ()), mangler (Mangler ())
{
setup_builtins ();
}
void
Context::setup_builtins ()
{
auto builtins = resolver->get_builtin_types ();
for (auto it = builtins.begin (); it != builtins.end (); it++)
{
HirId ref;
bool ok = tyctx->lookup_type_by_node_id ((*it)->get_node_id (), &ref);
rust_assert (ok);
TyTy::BaseType *lookup;
ok = tyctx->lookup_type (ref, &lookup);
rust_assert (ok);
TyTyResolveCompile::compile (this, lookup);
}
}
hashval_t
Context::type_hasher (tree type)
{
inchash::hash hstate;
hstate.add_int (TREE_CODE (type));
if (TYPE_NAME (type))
{
hashval_t record_name_hash
= IDENTIFIER_HASH_VALUE (DECL_NAME (TYPE_NAME (type)));
hstate.add_object (record_name_hash);
}
for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
/* Just the identifier is adequate to distinguish. */
hstate.add_object (IDENTIFIER_HASH_VALUE (TREE_PURPOSE (t)));
switch (TREE_CODE (type))
{
case METHOD_TYPE:
hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
/* FALLTHROUGH. */
case FUNCTION_TYPE:
for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
if (TREE_VALUE (t) != error_mark_node)
hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
break;
case OFFSET_TYPE:
hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
break;
case ARRAY_TYPE: {
if (TYPE_DOMAIN (type))
hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
{
unsigned typeless = TYPE_TYPELESS_STORAGE (type);
hstate.add_object (typeless);
}
}
break;
case INTEGER_TYPE: {
tree t = TYPE_MAX_VALUE (type);
if (!t)
t = TYPE_MIN_VALUE (type);
for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
hstate.add_object (TREE_INT_CST_ELT (t, i));
break;
}
case REAL_TYPE:
case FIXED_POINT_TYPE: {
unsigned prec = TYPE_PRECISION (type);
hstate.add_object (prec);
break;
}
case VECTOR_TYPE:
hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
break;
case RECORD_TYPE:
case UNION_TYPE:
case QUAL_UNION_TYPE: {
for (tree t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t))
{
hashval_t name_hash = IDENTIFIER_HASH_VALUE (DECL_NAME (t));
hashval_t type_hash = type_hasher (TREE_TYPE (t));
hstate.add_object (name_hash);
hstate.add_object (type_hash);
}
}
break;
case BOOLEAN_TYPE:
break;
case REFERENCE_TYPE:
case POINTER_TYPE: {
hashval_t type_hash = type_hasher (TREE_TYPE (type));
hstate.add_object (type_hash);
}
break;
default:
break;
}
return hstate.end ();
}
} // namespace Compile
} // namespace Rust
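The hash computed by Context::type_hasher is the key of the type cache declared in rust-compile-context.h (lookup_compiled_types / insert_compiled_type), so structurally identical GENERIC types are lowered to a single tree. A hypothetical call site in type lowering would use it roughly like this (sketch only; building the actual RECORD_TYPE is elided):

	// sketch: reuse a previously compiled, structurally identical type
	tree record = make_node (RECORD_TYPE); // fields, TYPE_NAME etc. filled in by the real lowering
	tree cached = NULL_TREE;
	if (ctx->lookup_compiled_types (record, &cached))
	  return cached;                           // hash hit: reuse the earlier tree
	return ctx->insert_compiled_type (record); // cache it and queue it for the middle-end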

gcc/rust/backend/rust-compile-context.h
@@ -0,0 +1,343 @@
// Copyright (C) 2020-2022 Free Software Foundation, Inc.
// This file is part of GCC.
// GCC is free software; you can redistribute it and/or modify it under
// the terms of the GNU General Public License as published by the Free
// Software Foundation; either version 3, or (at your option) any later
// version.
// GCC is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
// for more details.
// You should have received a copy of the GNU General Public License
// along with GCC; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
#ifndef RUST_COMPILE_CONTEXT
#define RUST_COMPILE_CONTEXT
#include "rust-system.h"
#include "rust-hir-map.h"
#include "rust-name-resolver.h"
#include "rust-hir-type-check.h"
#include "rust-backend.h"
#include "rust-hir-full.h"
#include "rust-mangle.h"
#include "rust-tree.h"
namespace Rust {
namespace Compile {
struct fncontext
{
tree fndecl;
::Bvariable *ret_addr;
};
class Context
{
public:
Context (::Backend *backend);
void setup_builtins ();
bool lookup_compiled_types (tree t, tree *type)
{
hashval_t h = type_hasher (t);
auto it = compiled_type_map.find (h);
if (it == compiled_type_map.end ())
return false;
*type = it->second;
return true;
}
tree insert_compiled_type (tree type)
{
hashval_t h = type_hasher (type);
auto it = compiled_type_map.find (h);
if (it != compiled_type_map.end ())
return it->second;
compiled_type_map.insert ({h, type});
push_type (type);
return type;
}
tree insert_main_variant (tree type)
{
hashval_t h = type_hasher (type);
auto it = main_variants.find (h);
if (it != main_variants.end ())
return it->second;
main_variants.insert ({h, type});
return type;
}
::Backend *get_backend () { return backend; }
Resolver::Resolver *get_resolver () { return resolver; }
Resolver::TypeCheckContext *get_tyctx () { return tyctx; }
Analysis::Mappings *get_mappings () { return mappings; }
void push_block (tree scope)
{
scope_stack.push_back (scope);
statements.push_back ({});
}
tree pop_block ()
{
auto block = scope_stack.back ();
scope_stack.pop_back ();
auto stmts = statements.back ();
statements.pop_back ();
backend->block_add_statements (block, stmts);
return block;
}
tree peek_enclosing_scope ()
{
if (scope_stack.size () == 0)
return nullptr;
return scope_stack.back ();
}
void add_statement_to_enclosing_scope (tree stmt)
{
statements.at (statements.size () - 2).push_back (stmt);
}
void add_statement (tree stmt) { statements.back ().push_back (stmt); }
void insert_var_decl (HirId id, ::Bvariable *decl)
{
compiled_var_decls[id] = decl;
}
bool lookup_var_decl (HirId id, ::Bvariable **decl)
{
auto it = compiled_var_decls.find (id);
if (it == compiled_var_decls.end ())
return false;
*decl = it->second;
return true;
}
void insert_function_decl (const TyTy::FnType *ref, tree fn)
{
auto id = ref->get_ty_ref ();
auto dId = ref->get_id ();
rust_assert (compiled_fn_map.find (id) == compiled_fn_map.end ());
compiled_fn_map[id] = fn;
auto it = mono_fns.find (dId);
if (it == mono_fns.end ())
mono_fns[dId] = {};
mono_fns[dId].push_back ({ref, fn});
}
bool lookup_function_decl (HirId id, tree *fn, DefId dId = UNKNOWN_DEFID,
const TyTy::BaseType *ref = nullptr)
{
// for any monomorphized fns
if (ref != nullptr)
{
rust_assert (dId != UNKNOWN_DEFID);
auto it = mono_fns.find (dId);
if (it == mono_fns.end ())
return false;
for (auto &e : mono_fns[dId])
{
const TyTy::BaseType *r = e.first;
tree f = e.second;
if (ref->is_equal (*r))
{
*fn = f;
return true;
}
}
return false;
}
auto it = compiled_fn_map.find (id);
if (it == compiled_fn_map.end ())
return false;
*fn = it->second;
return true;
}
void insert_const_decl (HirId id, tree expr) { compiled_consts[id] = expr; }
bool lookup_const_decl (HirId id, tree *expr)
{
auto it = compiled_consts.find (id);
if (it == compiled_consts.end ())
return false;
*expr = it->second;
return true;
}
void insert_label_decl (HirId id, tree label) { compiled_labels[id] = label; }
bool lookup_label_decl (HirId id, tree *label)
{
auto it = compiled_labels.find (id);
if (it == compiled_labels.end ())
return false;
*label = it->second;
return true;
}
void insert_pattern_binding (HirId id, tree binding)
{
implicit_pattern_bindings[id] = binding;
}
bool lookup_pattern_binding (HirId id, tree *binding)
{
auto it = implicit_pattern_bindings.find (id);
if (it == implicit_pattern_bindings.end ())
return false;
*binding = it->second;
return true;
}
void push_fn (tree fn, ::Bvariable *ret_addr)
{
fn_stack.push_back (fncontext{fn, ret_addr});
}
void pop_fn () { fn_stack.pop_back (); }
bool in_fn () { return fn_stack.size () != 0; }
// Note: peek_fn () must not be called when fn_stack is empty (asserted below).
fncontext peek_fn ()
{
rust_assert (!fn_stack.empty ());
return fn_stack.back ();
}
void push_type (tree t) { type_decls.push_back (t); }
void push_var (::Bvariable *v) { var_decls.push_back (v); }
void push_const (tree c) { const_decls.push_back (c); }
void push_function (tree f) { func_decls.push_back (f); }
void write_to_backend ()
{
backend->write_global_definitions (type_decls, const_decls, func_decls,
var_decls);
}
bool function_completed (tree fn)
{
for (auto it = func_decls.begin (); it != func_decls.end (); it++)
{
tree i = (*it);
if (i == fn)
{
return true;
}
}
return false;
}
void push_loop_context (Bvariable *var) { loop_value_stack.push_back (var); }
Bvariable *peek_loop_context () { return loop_value_stack.back (); }
Bvariable *pop_loop_context ()
{
auto back = loop_value_stack.back ();
loop_value_stack.pop_back ();
return back;
}
void push_loop_begin_label (tree label)
{
loop_begin_labels.push_back (label);
}
tree peek_loop_begin_label () { return loop_begin_labels.back (); }
tree pop_loop_begin_label ()
{
tree pop = loop_begin_labels.back ();
loop_begin_labels.pop_back ();
return pop;
}
void push_const_context (void) { const_context++; }
void pop_const_context (void)
{
if (const_context > 0)
const_context--;
}
bool const_context_p (void) { return (const_context > 0); }
std::string mangle_item (const TyTy::BaseType *ty,
const Resolver::CanonicalPath &path) const
{
return mangler.mangle_item (ty, path);
}
std::vector<tree> &get_type_decls () { return type_decls; }
std::vector<::Bvariable *> &get_var_decls () { return var_decls; }
std::vector<tree> &get_const_decls () { return const_decls; }
std::vector<tree> &get_func_decls () { return func_decls; }
static hashval_t type_hasher (tree type);
private:
::Backend *backend;
Resolver::Resolver *resolver;
Resolver::TypeCheckContext *tyctx;
Analysis::Mappings *mappings;
Mangler mangler;
// state
std::vector<fncontext> fn_stack;
std::map<HirId, ::Bvariable *> compiled_var_decls;
std::map<hashval_t, tree> compiled_type_map;
std::map<HirId, tree> compiled_fn_map;
std::map<HirId, tree> compiled_consts;
std::map<HirId, tree> compiled_labels;
std::vector<::std::vector<tree>> statements;
std::vector<tree> scope_stack;
std::vector<::Bvariable *> loop_value_stack;
std::vector<tree> loop_begin_labels;
std::map<DefId, std::vector<std::pair<const TyTy::BaseType *, tree>>>
mono_fns;
std::map<HirId, tree> implicit_pattern_bindings;
std::map<hashval_t, tree> main_variants;
// To GCC middle-end
std::vector<tree> type_decls;
std::vector<::Bvariable *> var_decls;
std::vector<tree> const_decls;
std::vector<tree> func_decls;
// Nonzero iff we are currently compiling something inside a constant context.
unsigned int const_context = 0;
};
} // namespace Compile
} // namespace Rust
#endif // RUST_COMPILE_CONTEXT
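To show how a lowering visitor is expected to interact with this context, here is a rough sketch of the block and statement bookkeeping (Backend::block and the names fndecl, locals, stmt and the two locations are assumptions following the gofrontend-style backend interface, not part of this patch):

	// sketch: lower a block expression
	tree enclosing = ctx->peek_enclosing_scope ();
	tree block = ctx->get_backend ()->block (fndecl, enclosing, locals,
						 start_locus, end_locus);
	ctx->push_block (block);           // opens a fresh statement list for this scope
	ctx->add_statement (stmt);         // statements accumulate in the innermost list
	tree finished = ctx->pop_block (); // block_add_statements attaches them to the block

Function lowering follows the same pattern with push_fn/pop_fn; peek_fn () exposes the current function decl and the variable used for its return value.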

gcc/rust/backend/rust-compile.cc
@@ -0,0 +1,414 @@
// Copyright (C) 2020-2022 Free Software Foundation, Inc.
// This file is part of GCC.
// GCC is free software; you can redistribute it and/or modify it under
// the terms of the GNU General Public License as published by the Free
// Software Foundation; either version 3, or (at your option) any later
// version.
// GCC is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
// for more details.
// You should have received a copy of the GNU General Public License
// along with GCC; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
#include "rust-compile.h"
#include "rust-compile-item.h"
#include "rust-compile-implitem.h"
#include "rust-compile-expr.h"
#include "rust-compile-struct-field-expr.h"
#include "rust-compile-stmt.h"
#include "rust-hir-trait-resolve.h"
#include "rust-hir-path-probe.h"
#include "rust-hir-type-bounds.h"
#include "rust-hir-dot-operator.h"
#include "rust-compile-block.h"
namespace Rust {
namespace Compile {
CompileCrate::CompileCrate (HIR::Crate &crate, Context *ctx)
: crate (crate), ctx (ctx)
{}
CompileCrate::~CompileCrate () {}
void
CompileCrate::Compile (HIR::Crate &crate, Context *ctx)
{
CompileCrate c (crate, ctx);
c.go ();
}
void
CompileCrate::go ()
{
for (auto &item : crate.items)
CompileItem::compile (item.get (), ctx);
}
// Shared methods in compilation
tree
HIRCompileBase::coercion_site (HirId id, tree rvalue,
const TyTy::BaseType *rval,
const TyTy::BaseType *lval,
Location lvalue_locus, Location rvalue_locus)
{
std::vector<Resolver::Adjustment> *adjustments = nullptr;
bool ok = ctx->get_tyctx ()->lookup_autoderef_mappings (id, &adjustments);
if (ok)
{
rvalue = resolve_adjustements (*adjustments, rvalue, rvalue_locus);
}
return coercion_site1 (rvalue, rval, lval, lvalue_locus, rvalue_locus);
}
tree
HIRCompileBase::coercion_site1 (tree rvalue, const TyTy::BaseType *rval,
const TyTy::BaseType *lval,
Location lvalue_locus, Location rvalue_locus)
{
if (rvalue == error_mark_node)
return error_mark_node;
const TyTy::BaseType *actual = rval->destructure ();
const TyTy::BaseType *expected = lval->destructure ();
if (expected->get_kind () == TyTy::TypeKind::REF)
{
// this is a dyn object
if (SLICE_TYPE_P (TREE_TYPE (rvalue)))
{
return rvalue;
}
// bad coercion... of something to a reference
if (actual->get_kind () != TyTy::TypeKind::REF)
return error_mark_node;
const TyTy::ReferenceType *exp
= static_cast<const TyTy::ReferenceType *> (expected);
const TyTy::ReferenceType *act
= static_cast<const TyTy::ReferenceType *> (actual);
tree deref_rvalue = indirect_expression (rvalue, rvalue_locus);
tree coerced
= coercion_site1 (deref_rvalue, act->get_base (), exp->get_base (),
lvalue_locus, rvalue_locus);
if (exp->is_dyn_object () && SLICE_TYPE_P (TREE_TYPE (coerced)))
return coerced;
return address_expression (coerced, rvalue_locus);
}
else if (expected->get_kind () == TyTy::TypeKind::POINTER)
{
// this is a dyn object
if (SLICE_TYPE_P (TREE_TYPE (rvalue)))
{
return rvalue;
}
// bad coercion... of something to a pointer
bool valid_coercion = actual->get_kind () == TyTy::TypeKind::REF
|| actual->get_kind () == TyTy::TypeKind::POINTER;
if (!valid_coercion)
return error_mark_node;
const TyTy::ReferenceType *exp
= static_cast<const TyTy::ReferenceType *> (expected);
TyTy::BaseType *actual_base = nullptr;
if (actual->get_kind () == TyTy::TypeKind::REF)
{
const TyTy::ReferenceType *act
= static_cast<const TyTy::ReferenceType *> (actual);
actual_base = act->get_base ();
}
else if (actual->get_kind () == TyTy::TypeKind::POINTER)
{
const TyTy::PointerType *act
= static_cast<const TyTy::PointerType *> (actual);
actual_base = act->get_base ();
}
rust_assert (actual_base != nullptr);
tree deref_rvalue = indirect_expression (rvalue, rvalue_locus);
tree coerced
= coercion_site1 (deref_rvalue, actual_base, exp->get_base (),
lvalue_locus, rvalue_locus);
if (exp->is_dyn_object () && SLICE_TYPE_P (TREE_TYPE (coerced)))
return coerced;
return address_expression (coerced, rvalue_locus);
}
else if (expected->get_kind () == TyTy::TypeKind::ARRAY)
{
if (actual->get_kind () != TyTy::TypeKind::ARRAY)
return error_mark_node;
tree tree_rval_type = TyTyResolveCompile::compile (ctx, actual);
tree tree_lval_type = TyTyResolveCompile::compile (ctx, expected);
if (!verify_array_capacities (tree_lval_type, tree_rval_type,
lvalue_locus, rvalue_locus))
return error_mark_node;
}
else if (expected->get_kind () == TyTy::TypeKind::SLICE)
{
// bad coercion
bool valid_coercion = actual->get_kind () == TyTy::TypeKind::SLICE
|| actual->get_kind () == TyTy::TypeKind::ARRAY;
if (!valid_coercion)
return error_mark_node;
// nothing to do here
if (actual->get_kind () == TyTy::TypeKind::SLICE)
return rvalue;
// return an unsized coercion
Resolver::Adjustment unsize_adj (
Resolver::Adjustment::AdjustmentType::UNSIZE, actual, expected);
return resolve_unsized_adjustment (unsize_adj, rvalue, rvalue_locus);
}
return rvalue;
}
tree
HIRCompileBase::coerce_to_dyn_object (tree compiled_ref,
const TyTy::BaseType *actual,
const TyTy::DynamicObjectType *ty,
Location locus)
{
tree dynamic_object = TyTyResolveCompile::compile (ctx, ty);
tree dynamic_object_fields = TYPE_FIELDS (dynamic_object);
tree vtable_field = DECL_CHAIN (dynamic_object_fields);
rust_assert (TREE_CODE (TREE_TYPE (vtable_field)) == ARRAY_TYPE);
// this assumes the ordering; currently the structure is
// __trait_object_ptr
// [list of function ptrs]
std::vector<std::pair<Resolver::TraitReference *, HIR::ImplBlock *>>
probed_bounds_for_receiver = Resolver::TypeBoundsProbe::Probe (actual);
tree address_of_compiled_ref = null_pointer_node;
if (!actual->is_unit ())
address_of_compiled_ref = address_expression (compiled_ref, locus);
std::vector<tree> vtable_ctor_elems;
std::vector<unsigned long> vtable_ctor_idx;
unsigned long i = 0;
for (auto &bound : ty->get_object_items ())
{
const Resolver::TraitItemReference *item = bound.first;
const TyTy::TypeBoundPredicate *predicate = bound.second;
auto address = compute_address_for_trait_item (item, predicate,
probed_bounds_for_receiver,
actual, actual, locus);
vtable_ctor_elems.push_back (address);
vtable_ctor_idx.push_back (i++);
}
tree vtable_ctor = ctx->get_backend ()->array_constructor_expression (
TREE_TYPE (vtable_field), vtable_ctor_idx, vtable_ctor_elems, locus);
std::vector<tree> dyn_ctor = {address_of_compiled_ref, vtable_ctor};
return ctx->get_backend ()->constructor_expression (dynamic_object, false,
dyn_ctor, -1, locus);
}
tree
HIRCompileBase::compute_address_for_trait_item (
const Resolver::TraitItemReference *ref,
const TyTy::TypeBoundPredicate *predicate,
std::vector<std::pair<Resolver::TraitReference *, HIR::ImplBlock *>>
&receiver_bounds,
const TyTy::BaseType *receiver, const TyTy::BaseType *root, Location locus)
{
// There are two cases here: one where the item has an implementation within a
// trait-impl-block, and one where the trait provides a default implementation
// for it.
//
// The awkward part is that this might be a generic trait, so we need to
// figure out the correct monomorphized type in order to resolve the address
// of the function; this is stored as part of the type-bound-predicate.
//
// Algorithm:
// check if there is an impl-item for this trait-item-ref first
// else assert that the trait-item-ref has a default implementation
TyTy::TypeBoundPredicateItem predicate_item
= predicate->lookup_associated_item (ref->get_identifier ());
rust_assert (!predicate_item.is_error ());
// this is the expected end type
TyTy::BaseType *trait_item_type = predicate_item.get_tyty_for_receiver (root);
rust_assert (trait_item_type->get_kind () == TyTy::TypeKind::FNDEF);
TyTy::FnType *trait_item_fntype
= static_cast<TyTy::FnType *> (trait_item_type);
// find impl-block for this trait-item-ref
HIR::ImplBlock *associated_impl_block = nullptr;
const Resolver::TraitReference *predicate_trait_ref = predicate->get ();
for (auto &item : receiver_bounds)
{
Resolver::TraitReference *trait_ref = item.first;
HIR::ImplBlock *impl_block = item.second;
if (predicate_trait_ref->is_equal (*trait_ref))
{
associated_impl_block = impl_block;
break;
}
}
// FIXME this probably should just return error_mark_node but this helps
// debug for now since we are wrongly returning early on type-resolution
// failures, until we take advantage of more error types and error_mark_node
rust_assert (associated_impl_block != nullptr);
// lookup self for the associated impl
std::unique_ptr<HIR::Type> &self_type_path
= associated_impl_block->get_type ();
TyTy::BaseType *self = nullptr;
bool ok = ctx->get_tyctx ()->lookup_type (
self_type_path->get_mappings ().get_hirid (), &self);
rust_assert (ok);
// lookup the predicate item from the self
TyTy::TypeBoundPredicate *self_bound = nullptr;
for (auto &bound : self->get_specified_bounds ())
{
const Resolver::TraitReference *bound_ref = bound.get ();
const Resolver::TraitReference *specified_ref = predicate->get ();
if (bound_ref->is_equal (*specified_ref))
{
self_bound = &bound;
break;
}
}
rust_assert (self_bound != nullptr);
// lookup the associated item from the associated impl block
TyTy::TypeBoundPredicateItem associated_self_item
= self_bound->lookup_associated_item (ref->get_identifier ());
rust_assert (!associated_self_item.is_error ());
TyTy::BaseType *mono1 = associated_self_item.get_tyty_for_receiver (self);
rust_assert (mono1 != nullptr);
rust_assert (mono1->get_kind () == TyTy::TypeKind::FNDEF);
TyTy::FnType *associated_item_ty1 = static_cast<TyTy::FnType *> (mono1);
// Lookup the impl-block for the associated impl_item if it exists
HIR::Function *associated_function = nullptr;
for (auto &impl_item : associated_impl_block->get_impl_items ())
{
bool is_function = impl_item->get_impl_item_type ()
== HIR::ImplItem::ImplItemType::FUNCTION;
if (!is_function)
continue;
HIR::Function *fn = static_cast<HIR::Function *> (impl_item.get ());
bool found_associated_item
= fn->get_function_name ().compare (ref->get_identifier ()) == 0;
if (found_associated_item)
associated_function = fn;
}
// we found an impl_item for this
if (associated_function != nullptr)
{
// lookup the associated type for this item
TyTy::BaseType *lookup = nullptr;
bool ok = ctx->get_tyctx ()->lookup_type (
associated_function->get_mappings ().get_hirid (), &lookup);
rust_assert (ok);
rust_assert (lookup->get_kind () == TyTy::TypeKind::FNDEF);
TyTy::FnType *lookup_fntype = static_cast<TyTy::FnType *> (lookup);
if (lookup_fntype->needs_substitution ())
{
TyTy::SubstitutionArgumentMappings mappings
= associated_item_ty1->solve_missing_mappings_from_this (
*trait_item_fntype, *lookup_fntype);
lookup_fntype = lookup_fntype->handle_substitions (mappings);
}
return CompileInherentImplItem::Compile (associated_function, ctx,
lookup_fntype, true, locus);
}
// we can only compile trait-items with a body
bool trait_item_has_definition = ref->is_optional ();
rust_assert (trait_item_has_definition);
HIR::TraitItem *trait_item = ref->get_hir_trait_item ();
return CompileTraitItem::Compile (trait_item, ctx, trait_item_fntype, true,
locus);
}
bool
HIRCompileBase::verify_array_capacities (tree ltype, tree rtype,
Location lvalue_locus,
Location rvalue_locus)
{
rust_assert (ltype != NULL_TREE);
rust_assert (rtype != NULL_TREE);
// let's just return ok as other errors have already occurred
if (ltype == error_mark_node || rtype == error_mark_node)
return true;
tree ltype_domain = TYPE_DOMAIN (ltype);
if (!ltype_domain)
return false;
if (!TREE_CONSTANT (TYPE_MAX_VALUE (ltype_domain)))
return false;
unsigned HOST_WIDE_INT ltype_length
= wi::ext (wi::to_offset (TYPE_MAX_VALUE (ltype_domain))
- wi::to_offset (TYPE_MIN_VALUE (ltype_domain)) + 1,
TYPE_PRECISION (TREE_TYPE (ltype_domain)),
TYPE_SIGN (TREE_TYPE (ltype_domain)))
.to_uhwi ();
tree rtype_domain = TYPE_DOMAIN (rtype);
if (!rtype_domain)
return false;
if (!TREE_CONSTANT (TYPE_MAX_VALUE (rtype_domain)))
return false;
unsigned HOST_WIDE_INT rtype_length
= wi::ext (wi::to_offset (TYPE_MAX_VALUE (rtype_domain))
- wi::to_offset (TYPE_MIN_VALUE (rtype_domain)) + 1,
TYPE_PRECISION (TREE_TYPE (rtype_domain)),
TYPE_SIGN (TREE_TYPE (rtype_domain)))
.to_uhwi ();
if (ltype_length != rtype_length)
{
rust_error_at (
rvalue_locus,
"expected an array with a fixed size of " HOST_WIDE_INT_PRINT_UNSIGNED
" elements, found one with " HOST_WIDE_INT_PRINT_UNSIGNED " elements",
ltype_length, rtype_length);
return false;
}
return true;
}
} // namespace Compile
} // namespace Rust
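These coercion helpers are not invoked by CompileCrate directly; they are called from the expression and statement visitors (declared in rust-compile-expr.h and friends, included above but not part of this excerpt). For intuition, the SLICE branch of coercion_site1 is what fires for an unsized coercion such as `let s: &[i32] = &[1, 2, 3];`. A hypothetical caller lowering a let binding would look roughly like this (CompileExpr::Compile and the surrounding names are assumptions):

	// sketch only: coerce a compiled initializer to the declared type of its binding
	tree init = CompileExpr::Compile (init_expr, ctx); // assumed visitor from rust-compile-expr.h
	tree coerced = coercion_site (init_expr->get_mappings ().get_hirid (),
				      init, init_ty /* rval */, binding_ty /* lval */,
				      binding_locus, init_locus);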

gcc/rust/backend/rust-compile.h
@@ -0,0 +1,47 @@
// Copyright (C) 2020-2022 Free Software Foundation, Inc.
// This file is part of GCC.
// GCC is free software; you can redistribute it and/or modify it under
// the terms of the GNU General Public License as published by the Free
// Software Foundation; either version 3, or (at your option) any later
// version.
// GCC is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
// for more details.
// You should have received a copy of the GNU General Public License
// along with GCC; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
#ifndef RUST_COMPILE_H
#define RUST_COMPILE_H
#include "rust-system.h"
#include "rust-hir-full.h"
#include "rust-compile-context.h"
namespace Rust {
namespace Compile {
class CompileCrate
{
public:
static void Compile (HIR::Crate &crate, Context *ctx);
~CompileCrate ();
private:
CompileCrate (HIR::Crate &crate, Context *ctx);
void go ();
HIR::Crate &crate;
Context *ctx;
};
} // namespace Compile
} // namespace Rust
#endif // RUST_COMPILE_H

gcc/rust/backend/rust-constexpr.cc
@@ -0,0 +1,433 @@
// This file is part of GCC.
// GCC is free software; you can redistribute it and/or modify it under
// the terms of the GNU General Public License as published by the Free
// Software Foundation; either version 3, or (at your option) any later
// version.
// GCC is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
// for more details.
// You should have received a copy of the GNU General Public License
// along with GCC; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
#include "rust-constexpr.h"
#include "rust-location.h"
#include "rust-diagnostics.h"
#include "rust-tree.h"
#include "fold-const.h"
#include "realmpfr.h"
#include "convert.h"
#include "print-tree.h"
#include "gimplify.h"
#include "tree-iterator.h"
namespace Rust {
namespace Compile {
struct constexpr_global_ctx
{
HOST_WIDE_INT constexpr_ops_count;
constexpr_global_ctx () : constexpr_ops_count (0) {}
};
struct constexpr_ctx
{
constexpr_global_ctx *global;
};
static tree
constant_value_1 (tree decl, bool strict_p, bool return_aggregate_cst_ok_p,
bool unshare_p);
tree
decl_constant_value (tree decl, bool unshare_p);
static void
non_const_var_error (location_t loc, tree r);
static tree
constexpr_expression (const constexpr_ctx *ctx, tree);
static tree
constexpr_fn_retval (const constexpr_ctx *ctx, tree r);
static tree
eval_store_expression (const constexpr_ctx *ctx, tree r);
static tree
eval_call_expression (const constexpr_ctx *ctx, tree r);
static tree
eval_binary_expression (const constexpr_ctx *ctx, tree r);
static tree
get_function_named_in_call (tree t);
tree
fold_expr (tree expr)
{
constexpr_global_ctx global_ctx;
constexpr_ctx ctx = {&global_ctx};
tree folded = constexpr_expression (&ctx, expr);
rust_assert (folded != NULL_TREE);
return folded;
}
static tree
constexpr_expression (const constexpr_ctx *ctx, tree t)
{
location_t loc = EXPR_LOCATION (t);
if (CONSTANT_CLASS_P (t))
{
if (TREE_OVERFLOW (t))
{
error_at (loc, "overflow in constant expression");
return t;
}
return t;
}
// Avoid excessively long constexpr evaluations
if (++ctx->global->constexpr_ops_count >= constexpr_ops_limit)
{
rust_error_at (
Location (loc),
"%<constexpr%> evaluation operation count exceeds limit of "
"%wd (use %<-fconstexpr-ops-limit=%> to increase the limit)",
constexpr_ops_limit);
return t;
}
tree r = t;
tree_code tcode = TREE_CODE (t);
switch (tcode)
{
case CONST_DECL: {
r = decl_constant_value (t, /*unshare_p=*/false);
if (TREE_CODE (r) == TARGET_EXPR
&& TREE_CODE (TARGET_EXPR_INITIAL (r)) == CONSTRUCTOR)
r = TARGET_EXPR_INITIAL (r);
if (DECL_P (r))
{
non_const_var_error (loc, r);
return r;
}
}
break;
case POINTER_PLUS_EXPR:
case POINTER_DIFF_EXPR:
case PLUS_EXPR:
case MINUS_EXPR:
case MULT_EXPR:
case TRUNC_DIV_EXPR:
case CEIL_DIV_EXPR:
case FLOOR_DIV_EXPR:
case ROUND_DIV_EXPR:
case TRUNC_MOD_EXPR:
case CEIL_MOD_EXPR:
case ROUND_MOD_EXPR:
case RDIV_EXPR:
case EXACT_DIV_EXPR:
case MIN_EXPR:
case MAX_EXPR:
case LSHIFT_EXPR:
case RSHIFT_EXPR:
case LROTATE_EXPR:
case RROTATE_EXPR:
case BIT_IOR_EXPR:
case BIT_XOR_EXPR:
case BIT_AND_EXPR:
case TRUTH_XOR_EXPR:
case LT_EXPR:
case LE_EXPR:
case GT_EXPR:
case GE_EXPR:
case EQ_EXPR:
case NE_EXPR:
case SPACESHIP_EXPR:
case UNORDERED_EXPR:
case ORDERED_EXPR:
case UNLT_EXPR:
case UNLE_EXPR:
case UNGT_EXPR:
case UNGE_EXPR:
case UNEQ_EXPR:
case LTGT_EXPR:
case RANGE_EXPR:
case COMPLEX_EXPR:
r = eval_binary_expression (ctx, t);
break;
case CALL_EXPR:
r = eval_call_expression (ctx, t);
break;
case RETURN_EXPR:
rust_assert (TREE_OPERAND (t, 0) != NULL_TREE);
r = constexpr_expression (ctx, TREE_OPERAND (t, 0));
break;
case MODIFY_EXPR:
r = eval_store_expression (ctx, t);
break;
default:
break;
}
return r;
}
static tree
eval_store_expression (const constexpr_ctx *ctx, tree t)
{
tree init = TREE_OPERAND (t, 1);
if (TREE_CLOBBER_P (init))
/* Just ignore clobbers. */
return void_node;
/* First we figure out where we're storing to. */
tree target = TREE_OPERAND (t, 0);
tree type = TREE_TYPE (target);
bool preeval = SCALAR_TYPE_P (type) || TREE_CODE (t) == MODIFY_EXPR;
if (preeval)
{
/* Evaluate the value to be stored without knowing what object it will be
stored in, so that any side-effects happen first. */
init = fold_expr (init);
}
bool evaluated = false;
tree object = NULL_TREE;
for (tree probe = target; object == NULL_TREE;)
{
switch (TREE_CODE (probe))
{
default:
if (evaluated)
object = probe;
else
{
probe = constexpr_expression (ctx, probe);
evaluated = true;
}
break;
}
}
return init;
}
/* Subroutine of constexpr_expression (forked from cxx_eval_binary_expression
   in cp/constexpr.cc).  Fold a binary expression whose operands have been
   reduced to constants.  */
static tree
eval_binary_expression (const constexpr_ctx *ctx, tree t)
{
tree orig_lhs = TREE_OPERAND (t, 0);
tree orig_rhs = TREE_OPERAND (t, 1);
tree lhs, rhs;
lhs = constexpr_expression (ctx, orig_lhs);
rhs = constexpr_expression (ctx, orig_rhs);
location_t loc = EXPR_LOCATION (t);
enum tree_code code = TREE_CODE (t);
tree type = TREE_TYPE (t);
return fold_binary_loc (loc, code, type, lhs, rhs);
}
// Subroutine of constexpr_expression (forked from cxx_eval_call_expression in
// cp/constexpr.cc).  Evaluate the call expression tree T by folding the body
// of the called function.
static tree
eval_call_expression (const constexpr_ctx *ctx, tree t)
{
tree fun = get_function_named_in_call (t);
return constexpr_fn_retval (ctx, DECL_SAVED_TREE (fun));
}
// Forked from constexpr_fn_retval in cp/constexpr.cc.  BODY is the body of a
// function declared to be constexpr, or a sub-statement thereof.  Returns the
// return value if suitable, error_mark_node for a statement not allowed in
// a constexpr function, or NULL_TREE if no return value was found.
static tree
constexpr_fn_retval (const constexpr_ctx *ctx, tree body)
{
switch (TREE_CODE (body))
{
case STATEMENT_LIST: {
tree expr = NULL_TREE;
for (tree stmt : tsi_range (body))
{
tree s = constexpr_fn_retval (ctx, stmt);
if (s == error_mark_node)
return error_mark_node;
else if (s == NULL_TREE)
/* Keep iterating. */;
else if (expr)
/* Multiple return statements. */
return error_mark_node;
else
expr = s;
}
return expr;
}
case RETURN_EXPR:
return constexpr_expression (ctx, body);
case DECL_EXPR: {
tree decl = DECL_EXPR_DECL (body);
if (TREE_CODE (decl) == USING_DECL
/* Accept __func__, __FUNCTION__, and __PRETTY_FUNCTION__. */
|| DECL_ARTIFICIAL (decl))
return NULL_TREE;
return error_mark_node;
}
case CLEANUP_POINT_EXPR:
return constexpr_fn_retval (ctx, TREE_OPERAND (body, 0));
case BIND_EXPR: {
tree b = BIND_EXPR_BODY (body);
return constexpr_fn_retval (ctx, b);
}
break;
default:
return error_mark_node;
}
return error_mark_node;
}
// Taken from cp/constexpr.cc
//
// If DECL is a scalar enumeration constant or variable with a
// constant initializer, return the initializer (or, its initializers,
// recursively); otherwise, return DECL. If STRICT_P, the
// initializer is only returned if DECL is a
// constant-expression. If RETURN_AGGREGATE_CST_OK_P, it is ok to
// return an aggregate constant. If UNSHARE_P, return an unshared
// copy of the initializer.
static tree
constant_value_1 (tree decl, bool strict_p, bool return_aggregate_cst_ok_p,
bool unshare_p)
{
while (TREE_CODE (decl) == CONST_DECL)
{
tree init;
/* If DECL is a static data member in a template
specialization, we must instantiate it here. The
initializer for the static data member is not processed
until needed; we need it now. */
init = DECL_INITIAL (decl);
if (init == error_mark_node)
{
if (TREE_CODE (decl) == CONST_DECL)
/* Treat the error as a constant to avoid cascading errors on
excessively recursive template instantiation (c++/9335). */
return init;
else
return decl;
}
decl = init;
}
return unshare_p ? unshare_expr (decl) : decl;
}
// A more relaxed version of decl_really_constant_value, used by the
// common C/C++ code.
tree
decl_constant_value (tree decl, bool unshare_p)
{
return constant_value_1 (decl, /*strict_p=*/false,
/*return_aggregate_cst_ok_p=*/true,
/*unshare_p=*/unshare_p);
}
static void
non_const_var_error (location_t loc, tree r)
{
error_at (loc,
"the value of %qD is not usable in a constant "
"expression",
r);
/* Avoid error cascade. */
if (DECL_INITIAL (r) == error_mark_node)
return;
// more in cp/constexpr.cc
}
static tree
get_callee (tree call)
{
if (call == NULL_TREE)
return call;
else if (TREE_CODE (call) == CALL_EXPR)
return CALL_EXPR_FN (call);
return NULL_TREE;
}
// We have an expression tree T that represents a call (a CALL_EXPR).  If the
// call is lexically to a named function, return the FUNCTION_DECL for that
// function.
static tree
get_function_named_in_call (tree t)
{
tree fun = get_callee (t);
if (fun && TREE_CODE (fun) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (fun, 0)) == FUNCTION_DECL)
fun = TREE_OPERAND (fun, 0);
return fun;
}
// forked from gcc/cp/constexpr.cc maybe_constexpr_fn
/* True if a function might be declared constexpr */
bool
maybe_constexpr_fn (tree t)
{
return (DECL_DECLARED_CONSTEXPR_P (t));
}
// forked from gcc/cp/constexpr.cc get_nth_callarg
/* We have an expression tree T that represents a call (a CALL_EXPR).
   Return the Nth argument.  */
inline tree
get_nth_callarg (tree t, int n)
{
return CALL_EXPR_ARG (t, n);
}
// forked from gcc/cp/constexpr.cc var_in_maybe_constexpr_fn
/* True if T was declared in a function that might be constexpr, i.e. a
   function declared constexpr.  */
bool
var_in_maybe_constexpr_fn (tree t)
{
return (DECL_FUNCTION_SCOPE_P (t) && maybe_constexpr_fn (DECL_CONTEXT (t)));
}
} // namespace Compile
} // namespace Rust
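Everything above reduces to tree folding; the following is a self-contained illustration of what eval_call_expression and constexpr_fn_retval accomplish together (not a real test from this patch, just a sketch built with generic GCC tree APIs):

	// sketch: a call to a function whose body is 'return 42' folds to the constant 42
	tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
	tree fndecl = build_fn_decl ("answer", fntype);
	DECL_SAVED_TREE (fndecl)
	  = build1 (RETURN_EXPR, integer_type_node,
		    build_int_cst (integer_type_node, 42));
	tree call = build_call_expr (fndecl, 0);
	tree folded = Rust::Compile::fold_expr (call); // yields the INTEGER_CST 42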

gcc/rust/backend/rust-constexpr.h
@@ -0,0 +1,31 @@
// This file is part of GCC.
// GCC is free software; you can redistribute it and/or modify it under
// the terms of the GNU General Public License as published by the Free
// Software Foundation; either version 3, or (at your option) any later
// version.
// GCC is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
// for more details.
// You should have received a copy of the GNU General Public License
// along with GCC; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
#ifndef RUST_CONSTEXPR
#define RUST_CONSTEXPR
#include "rust-system.h"
#include "tree.h"
namespace Rust {
namespace Compile {
extern tree fold_expr (tree);
} // namespace Compile
} // namespace Rust
#endif // RUST_CONSTEXPR
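fold_expr is the entire public surface of the evaluator. A hypothetical caller in the compilation context, for example reducing an already-lowered array capacity expression to a constant, would use it roughly like this (`capacity_expr` and `locus` are placeholders, and the diagnostic wording is illustrative):

	// sketch only: fold a lowered GENERIC expression and require a constant result
	tree folded = Rust::Compile::fold_expr (capacity_expr);
	if (TREE_CODE (folded) != INTEGER_CST)
	  rust_error_at (locus, "array capacity is not a constant expression"); // hypothetical diagnostic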