Initial Commit

This commit is contained in:
Riley Schneider
2025-12-03 16:38:10 +01:00
parent c5e26bf594
commit b732d8d4b5
17680 changed files with 5977495 additions and 2 deletions

View File

@@ -0,0 +1,456 @@
package # hide from the pauses
DBIx::Class::ResultSource::RowParser;
use strict;
use warnings;
use base 'DBIx::Class';
use Try::Tiny;
use DBIx::Class::ResultSource::RowParser::Util qw(
assemble_simple_parser
assemble_collapsing_parser
);
use namespace::clean;
# Accepts one or more relationships for the current source and returns an
# array of column names for each of those relationships. Column names are
# prefixed relative to the current source, in accordance with where they appear
# in the supplied relationships.
sub _resolve_prefetch {
  my ($self, $pre, $alias, $alias_map, $order, $pref_path) = @_;

  # $pref_path accumulates the chain of relationship names leading to the
  # node currently being processed (used for alias-map lookups and errors)
  $pref_path ||= [];

  if (not defined $pre or not length $pre) {
    return ();
  }
  elsif( ref $pre eq 'ARRAY' ) {
    # multiple prefetch specs - resolve each independently, every branch
    # starting from a *copy* of the path accumulated so far
    return
      map { $self->_resolve_prefetch( $_, $alias, $alias_map, $order, [ @$pref_path ] ) }
        @$pre;
  }
  elsif( ref $pre eq 'HASH' ) {
    # nested prefetch spec: resolve the relationship itself, then recurse
    # into the related source for the sub-spec, extending alias and path
    my @ret =
    map {
      $self->_resolve_prefetch($_, $alias, $alias_map, $order, [ @$pref_path ] ),
      $self->related_source($_)->_resolve_prefetch(
        $pre->{$_}, "${alias}.$_", $alias_map, $order, [ @$pref_path, $_] )
    } keys %$pre;
    return @ret;
  }
  elsif( ref $pre ) {
    $self->throw_exception(
      "don't know how to resolve prefetch reftype ".ref($pre));
  }
  else {
    # plain scalar - a single relationship name; walk the join alias map
    # down the accumulated path to find the join alias(es) assigned to it
    my $p = $alias_map;
    $p = $p->{$_} for (@$pref_path, $pre);

    $self->throw_exception (
      "Unable to resolve prefetch '$pre' - join alias map does not contain an entry for path: "
      . join (' -> ', @$pref_path, $pre)
    ) if (ref $p->{-join_aliases} ne 'ARRAY' or not @{$p->{-join_aliases}} );

    # consume one join alias per prefetch occurrence of this relationship
    my $as = shift @{$p->{-join_aliases}};

    my $rel_info = $self->relationship_info( $pre );
    $self->throw_exception( $self->source_name . " has no such relationship '$pre'" )
      unless $rel_info;

    # strip the top-level alias (e.g. 'me.') so the prefix stays relative
    my $as_prefix = ($alias =~ /^.*?\.(.+)$/ ? $1.'.' : '');

    # return pairs of [ join-aliased column, result-prefixed column ] for
    # every column of the related source
    return map { [ "${as}.$_", "${as_prefix}${pre}.$_", ] }
      $self->related_source($pre)->columns;
  }
}
# Takes an arrayref of {as} dbic column aliases and the collapse and select
# attributes from the same $rs (the selector requirement is a temporary
# workaround... I hope), and returns a coderef capable of:
# my $me_pref_clps = $coderef->([$rs->cursor->next/all])
# Where the $me_pref_clps arrayref is the future argument to inflate_result()
#
# For an example of this coderef in action (and to see its guts) look at
# t/resultset/rowparser_internals.t
#
# This is a huge performance win, as we call the same code for every row
# returned from the db, thus avoiding repeated method lookups when traversing
# relationships
#
# Also since the coderef is completely stateless (the returned structure is
# always fresh on every new invocation) this is a very good opportunity for
# memoization if further speed improvements are needed
#
# The way we construct this coderef is somewhat fugly, although the result is
# really worth it. The final coderef does not perform any kind of recursion -
# the entire nested structure constructor is rolled out into a single scope.
#
# In any case - the output of this thing is meticulously micro-tested, so
# any sort of adjustment/rewrite should be relatively easy (fsvo relatively)
#
sub _mk_row_parser {
  # $args and $attrs are separated to delineate what is core collapser stuff and
  # what is dbic $rs specific
  my ($self, $args, $attrs) = @_;

  die "HRI without pruning makes zero sense"
    if ( $args->{hri_style} && ! $args->{prune_null_branches} );

  # options shared by both the simple and the collapsing assemblers;
  # val_index maps each 'as' column alias to its position in a result row
  my %common = (
    hri_style => $args->{hri_style},
    prune_null_branches => $args->{prune_null_branches},
    val_index => { map
      { $args->{inflate_map}[$_] => $_ }
      ( 0 .. $#{$args->{inflate_map}} )
    },
  );

  my $check_null_columns;

  # without collapse the cheap simple parser suffices; otherwise resolve a
  # collapse map first and build the (much more involved) collapsing parser
  my $src = (! $args->{collapse} ) ? assemble_simple_parser(\%common) : do {
    my $collapse_map = $self->_resolve_collapse ({
      # FIXME
      # only consider real columns (not functions) during collapse resolution
      # this check shouldn't really be here, as users are not supposed to
      # alias random selectors to existing column names anyway, but still -
      # just in case
      # FIXME !!!! - this does not yet deal with unbalanced selectors correctly
      # (it is now trivial as the attrs specify where things go out of sync
      # needs MOAR tests)
      as => { map
        { ref $attrs->{select}[$common{val_index}{$_}] ? () : ( $_ => $common{val_index}{$_} ) }
        keys %{$common{val_index}}
      },
      premultiplied => $args->{premultiplied},
    });

    # remember which columns need a definedness check before collapsing
    $check_null_columns = $collapse_map->{-identifying_columns}
      if @{$collapse_map->{-identifying_columns}};

    assemble_collapsing_parser({
      %common,
      collapse_map => $collapse_map,
    });
  };

  # optional stress-test knob - force the generated source into utf8
  utf8::upgrade($src)
    if DBIx::Class::_ENV_::STRESSTEST_UTF8_UPGRADE_GENERATED_COLLAPSER_SOURCE;

  # return either a compiled coderef or the raw source (caller's choice),
  # plus the list of columns requiring null-checks (if any)
  return (
    $args->{eval} ? ( eval "sub $src" || die $@ ) : $src,
    $check_null_columns,
  );
}
# Takes an arrayref selection list and generates a collapse-map representing
# row-object fold-points. Every relationship is assigned a set of unique,
# non-nullable columns (which may *not even be* from the same resultset)
# and the collapser will use this information to correctly distinguish
# data of individual to-be-row-objects. See t/resultset/rowparser_internals.t
# for extensive RV examples
sub _resolve_collapse {
  my ($self, $args, $common_args) = @_;

  # for comprehensible error messages put ourselves at the head of the relationship chain
  $args->{_rel_chain} ||= [ $self->source_name ];

  # record top-level fully-qualified column index, signify toplevelness
  unless ($common_args->{_as_fq_idx}) {
    $common_args->{_as_fq_idx} = { %{$args->{as}} };
    $args->{_is_top_level} = 1;
  };

  # split the selection list into our own columns and those belonging to
  # related sources ('rel.col' style names)
  my ($my_cols, $rel_cols);
  for (keys %{$args->{as}}) {
    if ($_ =~ /^ ([^\.]+) \. (.+) /x) {
      $rel_cols->{$1}{$2} = 1;
    }
    else {
      $my_cols->{$_} = {}; # important for ||='s below
    }
  }

  my $relinfo;
  # run through relationships, collect metadata
  for my $rel (keys %$rel_cols) {
    my $inf = $self->relationship_info ($rel);

    $relinfo->{$rel} = {
      is_single => ( $inf->{attrs}{accessor} && $inf->{attrs}{accessor} ne 'multi' ),
      is_inner => ( ( $inf->{attrs}{join_type} || '' ) !~ /^left/i),
      rsrc => $self->related_source($rel),
    };

    # FIXME - need to use _resolve_cond here instead
    my $cond = $inf->{cond};

    # only a simple equality condition (every key 'foreign.x', every value
    # 'self.y') can contribute an fk_map usable for collapse resolution
    if (
      ref $cond eq 'HASH'
        and
      keys %$cond
        and
      ! grep { $_ !~ /^foreign\./ } (keys %$cond)
        and
      ! grep { $_ !~ /^self\./ } (values %$cond)
    ) {
      for my $f (keys %$cond) {
        my $s = $cond->{$f};
        $_ =~ s/^ (?: foreign | self ) \.//x for ($f, $s);
        $relinfo->{$rel}{fk_map}{$s} = $f;
      }
    }
  }

  # inject non-left fk-bridges from *INNER-JOINED* children (if any)
  for my $rel (grep { $relinfo->{$_}{is_inner} } keys %$relinfo) {
    my $ri = $relinfo->{$rel};
    for (keys %{$ri->{fk_map}} ) {
      # need to know source from *our* pov, hence $rel.col
      $my_cols->{$_} ||= { via_fk => "$rel.$ri->{fk_map}{$_}" }
        if defined $rel_cols->{$rel}{$ri->{fk_map}{$_}} # in fact selected
    }
  }

  # if the parent is already defined *AND* we have an inner reverse relationship
  # (i.e. do not exist without it) , assume all of its related FKs are selected
  # (even if they in fact are NOT in the select list). Keep a record of what we
  # assumed, and if any such phantom-column becomes part of our own collapser,
  # throw everything assumed-from-parent away and replace with the collapser of
  # the parent (whatever it may be)
  my $assumed_from_parent;
  if ( ! $args->{_parent_info}{underdefined} and ! $args->{_parent_info}{rev_rel_is_optional} ) {
    for my $col ( values %{$args->{_parent_info}{rel_condition} || {}} ) {
      next if exists $my_cols->{$col};
      $my_cols->{$col} = { via_collapse => $args->{_parent_info}{collapse_on_idcols} };
      $assumed_from_parent->{columns}{$col}++;
    }
  }

  # get colinfo for everything
  if ($my_cols) {
    my $ci = $self->columns_info;
    $my_cols->{$_}{colinfo} = $ci->{$_} for keys %$my_cols;
  }

  my $collapse_map;

  # first try to reuse the parent's collapser (i.e. reuse collapser over 1:1)
  # (makes for a leaner coderef later)
  unless ($collapse_map->{-identifying_columns}) {
    $collapse_map->{-identifying_columns} = $args->{_parent_info}{collapse_on_idcols}
      if $args->{_parent_info}{collapser_reusable};
  }

  # Still don't know how to collapse - try to resolve based on our columns (plus already inserted FK bridges)
  if (
    ! $collapse_map->{-identifying_columns}
      and
    $my_cols
      and
    my $idset = $self->_identifying_column_set ({map { $_ => $my_cols->{$_}{colinfo} } keys %$my_cols})
  ) {
    # see if the resulting collapser relies on any implied columns,
    # and fix stuff up if this is the case
    my @reduced_set = grep { ! $assumed_from_parent->{columns}{$_} } @$idset;

    # translate the column set into fully-qualified row positions, merged
    # with whatever the parent already collapses on
    $collapse_map->{-identifying_columns} = [ __unique_numlist(
      @{ $args->{_parent_info}{collapse_on_idcols}||[] },

      (map
        {
          my $fqc = join ('.',
            @{$args->{_rel_chain}}[1 .. $#{$args->{_rel_chain}}],
            ( $my_cols->{$_}{via_fk} || $_ ),
          );

          $common_args->{_as_fq_idx}->{$fqc};
        }
        @reduced_set
      ),
    )];
  }

  # Still don't know how to collapse - keep descending down 1:1 chains - if
  # a related non-LEFT 1:1 is resolvable - its condition will collapse us
  # too
  unless ($collapse_map->{-identifying_columns}) {
    my @candidates;

    for my $rel (keys %$relinfo) {
      next unless ($relinfo->{$rel}{is_single} && $relinfo->{$rel}{is_inner});

      if ( my $rel_collapse = $relinfo->{$rel}{rsrc}->_resolve_collapse ({
        as => $rel_cols->{$rel},
        _rel_chain => [ @{$args->{_rel_chain}}, $rel ],
        _parent_info => { underdefined => 1 },
      }, $common_args)) {
        push @candidates, $rel_collapse->{-identifying_columns};
      }
    }

    # get the set with least amount of columns
    # FIXME - maybe need to implement a data type order as well (i.e. prefer several ints
    # to a single varchar)
    if (@candidates) {
      ($collapse_map->{-identifying_columns}) = sort { scalar @$a <=> scalar @$b } (@candidates);
    }
  }

  # Still don't know how to collapse, and we are the root node. Last ditch
  # effort in case we are *NOT* premultiplied.
  # Run through *each multi* all the way down, left or not, and all
  # *left* singles (a single may become a multi underneath) . When everything
  # gets back see if all the rels link to us definitively. If this is the
  # case we are good - either one of them will define us, or if all are NULLs
  # we know we are "unique" due to the "non-premultiplied" check
  if (
    ! $collapse_map->{-identifying_columns}
      and
    ! $args->{premultiplied}
      and
    $args->{_is_top_level}
  ) {
    my (@collapse_sets, $uncollapsible_chain);

    for my $rel (keys %$relinfo) {

      # we already looked at these higher up
      next if ($relinfo->{$rel}{is_single} && $relinfo->{$rel}{is_inner});

      if (my $clps = $relinfo->{$rel}{rsrc}->_resolve_collapse ({
        as => $rel_cols->{$rel},
        _rel_chain => [ @{$args->{_rel_chain}}, $rel ],
        _parent_info => { underdefined => 1 },
      }, $common_args) ) {

        # for singles use the idcols wholesale (either there or not)
        if ($relinfo->{$rel}{is_single}) {
          push @collapse_sets, $clps->{-identifying_columns};
        }
        elsif (! $relinfo->{$rel}{fk_map}) {
          $uncollapsible_chain = 1;
          last;
        }
        else {
          # a multi rel is salvageable only if its entire fk is selected on
          # *our* side (then its values identify our row just as well)
          my $defined_cols_parent_side;

          for my $fq_col ( grep { /^$rel\.[^\.]+$/ } keys %{$args->{as}} ) {
            my ($col) = $fq_col =~ /([^\.]+)$/;

            $defined_cols_parent_side->{$_} = $args->{as}{$fq_col} for grep
              { $relinfo->{$rel}{fk_map}{$_} eq $col }
              keys %{$relinfo->{$rel}{fk_map}}
            ;
          }

          if (my $set = $self->_identifying_column_set([ keys %$defined_cols_parent_side ]) ) {
            push @collapse_sets, [ sort map { $defined_cols_parent_side->{$_} } @$set ];
          }
          else {
            $uncollapsible_chain = 1;
            last;
          }
        }
      }
      else {
        $uncollapsible_chain = 1;
        last;
      }
    }

    unless ($uncollapsible_chain) {
      # if we got here - we are good to go, but the construction is tricky
      # since our children will want to include our collapse criteria - we
      # don't give them anything (safe, since they are all collapsible on their own)
      # in addition we record the individual collapse possibilities
      # of all left children node collapsers, and merge them in the rowparser
      # coderef later
      $collapse_map->{-identifying_columns} = [];
      $collapse_map->{-identifying_columns_variants} = [ sort {
        (scalar @$a) <=> (scalar @$b)
          or
        (
          # Poor man's max()
          ( sort { $b <=> $a } @$a )[0]
            <=>
          ( sort { $b <=> $a } @$b )[0]
        )
      } @collapse_sets ];
    }
  }

  # stop descending into children if we were called by a parent for first-pass
  # and don't despair if nothing was found (there may be other parallel branches
  # to dive into)
  if ($args->{_parent_info}{underdefined}) {
    return $collapse_map->{-identifying_columns} ? $collapse_map : undef
  }
  # nothing down the chain resolved - can't calculate a collapse-map
  elsif (! $collapse_map->{-identifying_columns}) {
    $self->throw_exception ( sprintf
      "Unable to calculate a definitive collapse column set for %s%s: fetch more unique non-nullable columns",
      $self->source_name,
      @{$args->{_rel_chain}} > 1
        ? sprintf (' (last member of the %s chain)', join ' -> ', @{$args->{_rel_chain}} )
        : ''
      ,
    );
  }

  # If we got that far - we are collapsable - GREAT! Now go down all children
  # a second time, and fill in the rest
  $collapse_map->{-identifying_columns} = [ __unique_numlist(
    @{ $args->{_parent_info}{collapse_on_idcols}||[] },
    @{ $collapse_map->{-identifying_columns} },
  )];

  my @id_sets;
  for my $rel (sort keys %$relinfo) {

    $collapse_map->{$rel} = $relinfo->{$rel}{rsrc}->_resolve_collapse ({
      as => { map { $_ => 1 } ( keys %{$rel_cols->{$rel}} ) },
      _rel_chain => [ @{$args->{_rel_chain}}, $rel],
      _parent_info => {
        # shallow copy
        collapse_on_idcols => [ @{$collapse_map->{-identifying_columns}} ],

        rel_condition => $relinfo->{$rel}{fk_map},

        is_optional => ! $relinfo->{$rel}{is_inner},

        # if there is at least one *inner* reverse relationship which is HASH-based (equality only)
        # we can safely assume that the child can not exist without us
        rev_rel_is_optional => ( grep
          { ref $_->{cond} eq 'HASH' and ($_->{attrs}{join_type}||'') !~ /^left/i }
          values %{ $self->reverse_relationship_info($rel) },
        ) ? 0 : 1,

        # if this is a 1:1 our own collapser can be used as a collapse-map
        # (regardless of left or not)
        collapser_reusable => (
          $relinfo->{$rel}{is_single}
            &&
          $relinfo->{$rel}{is_inner}
            &&
          @{$collapse_map->{-identifying_columns}}
        ) ? 1 : 0,
      },
    }, $common_args );

    $collapse_map->{$rel}{-is_single} = 1 if $relinfo->{$rel}{is_single};
    $collapse_map->{$rel}{-is_optional} ||= 1 unless $relinfo->{$rel}{is_inner};
  }

  return $collapse_map;
}
# adding a dependency on List::MoreUtils *just* for this is not worth it
sub __unique_numlist {
  # Deduplicate the supplied numbers and return them sorted in ascending
  # numeric order (a tiny stand-in for List::MoreUtils::uniq + sort).
  my %seen;
  $seen{$_} = 1 for @_;
  return sort { $a <=> $b } keys %seen;
}
1;

View File

@@ -0,0 +1,368 @@
package # hide from the pauses
DBIx::Class::ResultSource::RowParser::Util;
use strict;
use warnings;
use DBIx::Class::_Util 'perlstring';
use constant HAS_DOR => ( $] < 5.010 ? 0 : 1 );
use base 'Exporter';
our @EXPORT_OK = qw(
assemble_simple_parser
assemble_collapsing_parser
);
# working title - we are hoping to extract this eventually...
our $null_branch_class = 'DBIx::ResultParser::RelatedNullBranch';
sub __wrap_in_strictured_scope {
  # Wrap a chunk of generated Perl source in a bare block enabling
  # strictures and fatal 'uninitialized' warnings, so any slip in the
  # generated code surfaces immediately when the string is eval'd.
  my ($src) = @_;
  return join '',
    " { use strict; use warnings; use warnings FATAL => 'uninitialized';\n",
    $src,
    "\n }",
  ;
}
sub assemble_simple_parser {
  #my ($args) = @_;

  # the non-collapsing assembler is easy
  # FIXME SUBOPTIMAL there could be a yet faster way to do things here, but
  # need to try an actual implementation and benchmark it:
  #
  # <timbunce_> First setup the nested data structure you want for each row
  # Then call bind_col() to alias the row fields into the right place in
  # the data structure, then to fetch the data do:
  # push @rows, dclone($row_data_struct) while ($sth->fetchrow);
  #
  # generate source that maps, in place, every fetched row arrayref into
  # the nested structure described by the inflate map
  my $parser_src = sprintf('$_ = %s for @{$_[0]}', __visit_infmap_simple($_[0]) );

  # change the quoted placeholders to unquoted alias-references
  $parser_src =~ s/ \' \xFF__VALPOS__(\d+)__\xFF \' /"\$_->[$1]"/gex;

  # implicit return: the stricture-wrapped source string (eval'd by caller)
  __wrap_in_strictured_scope($parser_src);
}
# the simple non-collapsing nested structure recursor
sub __visit_infmap_simple {
  # Recursively build the source text of the nested result structure for
  # the non-collapsing case. Column values are emitted as quoted
  # \xFF__VALPOS__<n>__\xFF placeholders, substituted by the caller.
  my $args = shift;

  # split the value index into our own columns and those of related
  # sources (the latter keyed by the first part of 'rel.col')
  my $my_cols = {};
  my $rel_cols;
  for (keys %{$args->{val_index}}) {
    if ($_ =~ /^ ([^\.]+) \. (.+) /x) {
      $rel_cols->{$1}{$2} = $args->{val_index}{$_};
    }
    else {
      $my_cols->{$_} = $args->{val_index}{$_};
    }
  }

  my @relperl;
  for my $rel (sort keys %$rel_cols) {

    # recurse - each related branch is itself a simple nested structure
    my $rel_struct = __visit_infmap_simple({ %$args,
      val_index => $rel_cols->{$rel},
    });

    if (keys %$my_cols) {

      # a branch counts as "null" when every one of its columns is undef;
      # emit the check over the placeholders (rewritten to row slots later)
      my $branch_null_checks = join ' && ', map
        { "( ! defined '\xFF__VALPOS__${_}__\xFF' )" }
        sort { $a <=> $b } values %{$rel_cols->{$rel}}
      ;

      if ($args->{prune_null_branches}) {
        # pruning: a null branch is simply replaced by undef
        $rel_struct = sprintf ( '( (%s) ? undef : %s )',
          $branch_null_checks,
          $rel_struct,
        );
      }
      else {
        # no pruning: keep the null branch but bless it into a marker class
        # so downstream code can tell it apart from real data
        $rel_struct = sprintf ( '( (%s) ? bless( (%s), %s ) : %s )',
          $branch_null_checks,
          $rel_struct,
          perlstring($null_branch_class),
          $rel_struct,
        );
      }
    }

    push @relperl, sprintf '( %s => %s )',
      perlstring($rel),
      $rel_struct,
    ;
  }

  my $me_struct;
  $me_struct = __result_struct_to_source($my_cols) if keys %$my_cols;

  if ($args->{hri_style}) {
    # HRI style: strip the braces off our own hash source and flatten it
    # together with the rel structures into a single hashref
    $me_struct =~ s/^ \s* \{ | \} \s* $//gx
      if $me_struct;

    return sprintf '{ %s }', join (', ', $me_struct||(), @relperl);
  }
  else {
    # classic inflate_result() style: [ $me_hash, { rel => ... } ]
    return sprintf '[%s]', join (',',
      $me_struct || 'undef',
      @relperl ? sprintf ('{ %s }', join (',', @relperl)) : (),
    );
  }
}
sub assemble_collapsing_parser {
  my $args = shift;

  # the %cur_row_ids container normalizes NULL id values; we can skip it
  # entirely when null branches are pruned (it may get unset further down)
  my $no_rowid_container = $args->{prune_null_branches};

  my ($top_node_key, $top_node_key_assembler);

  if (scalar @{$args->{collapse_map}{-identifying_columns}}) {
    # the regular case - the root node is keyed by its identifying columns
    $top_node_key = join ('', map
      { "{'\xFF__IDVALPOS__${_}__\xFF'}" }
      @{$args->{collapse_map}{-identifying_columns}}
    );
  }
  elsif( my @variants = @{$args->{collapse_map}{-identifying_columns_variants}} ) {
    # underdefined root: synthesize a "virtual" id out of whichever variant
    # id-set happens to be defined on a given row (falling back to the raw
    # row position when all variants are NULL)
    my @path_parts = map { sprintf
      "( ( defined '\xFF__VALPOS__%d__\xFF' ) && (join qq(\xFF), '', %s, '') )",
      $_->[0],  # checking just first is enough - one ID defined, all defined
      ( join ', ', map { "'\xFF__VALPOS__${_}__\xFF'" } @$_ ),
    } @variants;

    # the virtual column lives one slot past the real selection list
    my $virtual_column_idx = (scalar keys %{$args->{val_index}} ) + 1;

    $top_node_key = "{'\xFF__IDVALPOS__${virtual_column_idx}__\xFF'}";

    $top_node_key_assembler = sprintf "'\xFF__IDVALPOS__%d__\xFF' = (%s);",
      $virtual_column_idx,
      "\n" . join( "\n or\n", @path_parts, qq{"\0\$rows_pos\0"} )
    ;

    # record the custom key on the collapse map so the recursor uses it too
    $args->{collapse_map} = {
      %{$args->{collapse_map}},
      -custom_node_key => $top_node_key,
    };

    # the virtual id must live in %cur_row_ids - can not skip the container
    $no_rowid_container = 0;
  }
  else {
    die('Unexpected collapse map contents');
  }

  my ($data_assemblers, $stats) = __visit_infmap_collapse ($args);

  # source fragments for the %cur_row_ids declaration and its per-row
  # population (empty strings when the container is not needed)
  my @idcol_args = $no_rowid_container ? ('', '') : (
    ', %cur_row_ids', # only declare the variable if we'll use it
    join ("\n", map {
      my $quoted_null_val = qq( "\0NULL\xFF\${rows_pos}\xFF${_}\0" );
      qq(\$cur_row_ids{$_} = ) . (
        # in case we prune - we will never hit these undefs
        $args->{prune_null_branches} ? qq( \$cur_row_data->[$_]; )
      : HAS_DOR                      ? qq( \$cur_row_data->[$_] // $quoted_null_val; )
      :                                qq( defined(\$cur_row_data->[$_]) ? \$cur_row_data->[$_] : $quoted_null_val; )
      )
    } sort { $a <=> $b } keys %{ $stats->{idcols_seen} } ),
  );

  my $parser_src = sprintf (<<'EOS', @idcol_args, $top_node_key_assembler||'', $top_node_key, join( "\n", @{$data_assemblers||[]} ) );
### BEGIN LITERAL STRING EVAL
my $rows_pos = 0;
my ($result_pos, @collapse_idx, $cur_row_data %1$s);
# this loop is a bit arcane - the rationale is that the passed in
# $_[0] will either have only one row (->next) or will have all
# rows already pulled in (->all and/or unordered). Given that the
# result can be rather large - we reuse the same already allocated
# array, since the collapsed prefetch is smaller by definition.
# At the end we cut the leftovers away and move on.
while ($cur_row_data = (
( $rows_pos >= 0 and $_[0][$rows_pos++] )
or
( $_[1] and $rows_pos = -1 and $_[1]->() )
) ) {
# this code exists only when we are using a cur_row_ids
# furthermore the undef checks may or may not be there
# depending on whether we prune or not
#
# due to left joins some of the ids may be NULL/undef, and
# won't play well when used as hash lookups
# we also need to differentiate NULLs on per-row/per-col basis
# (otherwise folding of optional 1:1s will be greatly confused
%2$s
# in the case of an underdefined root - calculate the virtual id (otherwise no code at all)
%3$s
# if we were supplied a coderef - we are collapsing lazily (the set
# is ordered properly)
# as long as we have a result already and the next result is new we
# return the pre-read data and bail
$_[1] and $result_pos and ! $collapse_idx[0]%4$s and (unshift @{$_[2]}, $cur_row_data) and last;
# the rel assemblers
%5$s
}
$#{$_[0]} = $result_pos - 1; # truncate the passed in array to where we filled it with results
### END LITERAL STRING EVAL
EOS

  # !!! note - different var than the one above
  # change the quoted placeholders to unquoted alias-references
  $parser_src =~ s/ \' \xFF__VALPOS__(\d+)__\xFF \' /"\$cur_row_data->[$1]"/gex;

  # id placeholders go either straight to the row slot (no container) or
  # through the NULL-normalizing %cur_row_ids hash
  $parser_src =~ s/
    \' \xFF__IDVALPOS__(\d+)__\xFF \'
  /
    $no_rowid_container ? "\$cur_row_data->[$1]" : "\$cur_row_ids{$1}"
  /gex;

  __wrap_in_strictured_scope($parser_src);
}
# the collapsing nested structure recursor
sub __visit_infmap_collapse {
  # shallow-copy the args - each recursion level adds its own keys
  my $args = {%{ shift() }};

  # a shared counter ref is threaded through the recursion so that every
  # node (root and all descendants) gets a distinct @collapse_idx slot
  my $cur_node_idx = ${ $args->{-node_idx_counter} ||= \do { my $x = 0} }++;

  # NOTE: only $my_cols is initialized here; $rel_cols stays undef and
  # autovivifies on the first 'rel.col' assignment below
  my ($my_cols, $rel_cols) = {};
  for ( keys %{$args->{val_index}} ) {
    if ($_ =~ /^ ([^\.]+) \. (.+) /x) {
      $rel_cols->{$1}{$2} = $args->{val_index}{$_};
    }
    else {
      $my_cols->{$_} = $args->{val_index}{$_};
    }
  }

  if ($args->{hri_style}) {
    # in HRI mode a rel hash-key would clobber a same-named column - drop it
    delete $my_cols->{$_} for grep { $rel_cols->{$_} } keys %$my_cols;
  }

  my $me_struct;
  $me_struct = __result_struct_to_source($my_cols) if keys %$my_cols;

  # classic inflate_result() form wraps the column hash in an arrayref
  $me_struct = sprintf( '[ %s ]', $me_struct||'' )
    unless $args->{hri_style};

  # hash-path into @collapse_idx uniquely identifying this node's row
  my $node_key = $args->{collapse_map}->{-custom_node_key} || join ('', map
    { "{'\xFF__IDVALPOS__${_}__\xFF'}" }
    @{$args->{collapse_map}->{-identifying_columns}}
  );
  my $node_idx_slot = sprintf '$collapse_idx[%d]%s', $cur_node_idx, $node_key;

  my @src;

  if ($cur_node_idx == 0) {
    # root node - first sighting also fills the next result slot
    push @src, sprintf( '%s %s $_[0][$result_pos++] = %s;',
      $node_idx_slot,
      (HAS_DOR ? '//=' : '||='),
      $me_struct || '{}',
    );
  }
  else {
    # non-root - attach ourselves into the parent node's structure
    my $parent_attach_slot = sprintf( '$collapse_idx[%d]%s%s{%s}',
      @{$args}{qw/-parent_node_idx -parent_node_key/},
      $args->{hri_style} ? '' : '[1]',
      perlstring($args->{-node_rel_name}),
    );

    if ($args->{collapse_map}->{-is_single}) {
      # 1:1 - plain assignment on first sighting
      push @src, sprintf ( '%s %s %s%s;',
        $parent_attach_slot,
        (HAS_DOR ? '//=' : '||='),
        $node_idx_slot,
        $me_struct ? " = $me_struct" : '',
      );
    }
    else {
      # multi - push onto the parent's accumulator, but only for rows not
      # seen before (the idx slot doubles as the 'seen' marker)
      push @src, sprintf('(! %s) and push @{%s}, %s%s;',
        $node_idx_slot,
        $parent_attach_slot,
        $node_idx_slot,
        $me_struct ? " = $me_struct" : '',
      );
    }
  }

  my $known_present_ids = { map { $_ => 1 } @{$args->{collapse_map}{-identifying_columns}} };
  my ($stats, $rel_src);

  for my $rel (sort keys %$rel_cols) {

    my $relinfo = $args->{collapse_map}{$rel};

    # recurse into the child branch
    ($rel_src, $stats->{$rel}) = __visit_infmap_collapse({ %$args,
      val_index => $rel_cols->{$rel},
      collapse_map => $relinfo,
      -parent_node_idx => $cur_node_idx,
      -parent_node_key => $node_key,
      -node_rel_name => $rel,
    });

    my $rel_src_pos = $#src + 1;
    push @src, @$rel_src;

    # an optional (LEFT-joined) child may be all-NULL - guard its assembly
    # with a definedness check on one of its own identifying columns that
    # is not already guaranteed present by the parent
    if (
      $relinfo->{-is_optional}
        and
      scalar( my ($first_distinct_child_idcol) = grep
        { ! $known_present_ids->{$_} }
        @{$relinfo->{-identifying_columns}}
      )
    ) {

      if ($args->{prune_null_branches}) {

        # start of wrap of the entire chain in a conditional
        splice @src, $rel_src_pos, 0, sprintf "( ! defined %s )\n ? %s%s{%s} = %s\n : do {",
          "'\xFF__VALPOS__${first_distinct_child_idcol}__\xFF'",
          $node_idx_slot,
          $args->{hri_style} ? '' : '[1]',
          perlstring($rel),
          ($args->{hri_style} && $relinfo->{-is_single}) ? 'undef' : '[]'
        ;

        # end of wrap
        push @src, '};'
      }
      else {

        # no pruning - bless the null branch into the marker class instead
        splice @src, $rel_src_pos + 1, 0, sprintf ( '(defined %s) or bless (%s[1]{%s}, %s);',
          "'\xFF__VALPOS__${first_distinct_child_idcol}__\xFF'",
          $node_idx_slot,
          perlstring($rel),
          perlstring($null_branch_class),
        );
      }
    }
  }

  # return the assembly source lines plus the union of all id columns seen
  # anywhere in this subtree (drives the %cur_row_ids population)
  return (
    \@src,
    {
      idcols_seen => {
        ( map { %{ $_->{idcols_seen} } } values %$stats ),
        ( map { $_ => 1 } @{$args->{collapse_map}->{-identifying_columns}} ),
      }
    }
  );
}
sub __result_struct_to_source {
  # Render a { column_alias => row_position } map as the source text of a
  # hashref constructor, values emitted as quoted \xFF__VALPOS__<n>__\xFF
  # placeholders (rewritten to real row-slot accesses by the callers).
  my ($col_pos) = @_;

  my @pairs = map
    { sprintf "%s => '\xFF__VALPOS__%d__\xFF'", perlstring($_), $col_pos->{$_} }
    sort keys %$col_pos
  ;

  return sprintf '{ %s }', join ', ', @pairs;
}
1;

View File

@@ -0,0 +1,44 @@
package DBIx::Class::ResultSource::Table;
use strict;
use warnings;
use DBIx::Class::ResultSet;
use base qw/DBIx::Class/;
__PACKAGE__->load_components(qw/ResultSource/);
=head1 NAME
DBIx::Class::ResultSource::Table - Table object
=head1 SYNOPSIS
=head1 DESCRIPTION
Table object that inherits from L<DBIx::Class::ResultSource>.
=head1 METHODS
=head2 from
Returns the FROM entry for the table (i.e. the table name)
=cut
sub from { shift->name; }
=head1 FURTHER QUESTIONS?
Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
=head1 COPYRIGHT AND LICENSE
This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
redistribute it and/or modify it under the same terms as the
L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
=cut
1;

View File

@@ -0,0 +1,47 @@
=for comment POD_DERIVED_INDEX_GENERATED
The following documentation is automatically generated. Please do not edit
this file, but rather the original, inline with DBIx::Class::ResultSource::Table
at lib/DBIx/Class/ResultSource/Table.pm
(on the system that originally ran this).
If you do edit this file, and don't want your changes to be removed, make
sure you change the first line.
=cut
=head1 NAME
DBIx::Class::ResultSource::Table - Table object
=head1 SYNOPSIS
=head1 DESCRIPTION
Table object that inherits from L<DBIx::Class::ResultSource>.
=head1 METHODS
=head2 from
Returns the FROM entry for the table (i.e. the table name)
=head1 INHERITED METHODS
=over 4
=item L<DBIx::Class::ResultSource>
L<add_column|DBIx::Class::ResultSource/add_column>, L<add_columns|DBIx::Class::ResultSource/add_columns>, L<add_relationship|DBIx::Class::ResultSource/add_relationship>, L<add_unique_constraint|DBIx::Class::ResultSource/add_unique_constraint>, L<add_unique_constraints|DBIx::Class::ResultSource/add_unique_constraints>, L<column_info|DBIx::Class::ResultSource/column_info>, L<column_info_from_storage|DBIx::Class::ResultSource/column_info_from_storage>, L<columns|DBIx::Class::ResultSource/columns>, L<columns_info|DBIx::Class::ResultSource/columns_info>, L<default_sqlt_deploy_hook|DBIx::Class::ResultSource/default_sqlt_deploy_hook>, L<handle|DBIx::Class::ResultSource/handle>, L<has_column|DBIx::Class::ResultSource/has_column>, L<has_relationship|DBIx::Class::ResultSource/has_relationship>, L<name|DBIx::Class::ResultSource/name>, L<name_unique_constraint|DBIx::Class::ResultSource/name_unique_constraint>, L<new|DBIx::Class::ResultSource/new>, L<primary_columns|DBIx::Class::ResultSource/primary_columns>, L<related_class|DBIx::Class::ResultSource/related_class>, L<related_source|DBIx::Class::ResultSource/related_source>, L<relationship_info|DBIx::Class::ResultSource/relationship_info>, L<relationships|DBIx::Class::ResultSource/relationships>, L<remove_column|DBIx::Class::ResultSource/remove_column>, L<remove_columns|DBIx::Class::ResultSource/remove_columns>, L<result_class|DBIx::Class::ResultSource/result_class>, L<resultset|DBIx::Class::ResultSource/resultset>, L<resultset_attributes|DBIx::Class::ResultSource/resultset_attributes>, L<resultset_class|DBIx::Class::ResultSource/resultset_class>, L<reverse_relationship_info|DBIx::Class::ResultSource/reverse_relationship_info>, L<schema|DBIx::Class::ResultSource/schema>, L<sequence|DBIx::Class::ResultSource/sequence>, L<set_primary_key|DBIx::Class::ResultSource/set_primary_key>, L<source_info|DBIx::Class::ResultSource/source_info>, L<source_name|DBIx::Class::ResultSource/source_name>, 
L<sqlt_deploy_callback|DBIx::Class::ResultSource/sqlt_deploy_callback>, L<storage|DBIx::Class::ResultSource/storage>, L<throw_exception|DBIx::Class::ResultSource/throw_exception>, L<unique_constraint_columns|DBIx::Class::ResultSource/unique_constraint_columns>, L<unique_constraint_names|DBIx::Class::ResultSource/unique_constraint_names>, L<unique_constraints|DBIx::Class::ResultSource/unique_constraints>
=back
=head1 FURTHER QUESTIONS?
Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
=head1 COPYRIGHT AND LICENSE
This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
redistribute it and/or modify it under the same terms as the
L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.

View File

@@ -0,0 +1,187 @@
package DBIx::Class::ResultSource::View;
use strict;
use warnings;
use DBIx::Class::ResultSet;
use base qw/DBIx::Class/;
__PACKAGE__->load_components(qw/ResultSource/);
__PACKAGE__->mk_group_accessors(
'simple' => qw(is_virtual view_definition deploy_depends_on) );
=head1 NAME
DBIx::Class::ResultSource::View - ResultSource object representing a view
=head1 SYNOPSIS
package MyApp::Schema::Result::Year2000CDs;
use base qw/DBIx::Class::Core/;
__PACKAGE__->table_class('DBIx::Class::ResultSource::View');
__PACKAGE__->table('year2000cds');
__PACKAGE__->result_source_instance->is_virtual(1);
__PACKAGE__->result_source_instance->view_definition(
"SELECT cdid, artist, title FROM cd WHERE year ='2000'"
);
__PACKAGE__->add_columns(
'cdid' => {
data_type => 'integer',
is_auto_increment => 1,
},
'artist' => {
data_type => 'integer',
},
'title' => {
data_type => 'varchar',
size => 100,
},
);
=head1 DESCRIPTION
View object that inherits from L<DBIx::Class::ResultSource>
This class extends ResultSource to add basic view support.
A view has a L</view_definition>, which contains a SQL query. The query can
only have parameters if L</is_virtual> is set to true. It may contain JOINs,
sub selects and any other SQL your database supports.
View definition SQL is deployed to your database on
L<DBIx::Class::Schema/deploy> unless you set L</is_virtual> to true.
Deploying the view does B<not> translate it between different database
syntaxes, so be careful what you write in your view SQL.
Virtual views (L</is_virtual> true), are assumed to not
exist in your database as a real view. The L</view_definition> in this
case replaces the view name in a FROM clause in a subselect.
=head1 EXAMPLES
Having created the MyApp::Schema::Year2000CDs schema as shown in the SYNOPSIS
above, you can then:
$2000_cds = $schema->resultset('Year2000CDs')
->search()
->all();
$count = $schema->resultset('Year2000CDs')
->search()
->count();
If you modified the schema to include a placeholder
__PACKAGE__->result_source_instance->view_definition(
"SELECT cdid, artist, title FROM cd WHERE year = ?"
);
and ensuring you have is_virtual set to true:
__PACKAGE__->result_source_instance->is_virtual(1);
You could now say:
$2001_cds = $schema->resultset('Year2000CDs')
->search({}, { bind => [2001] })
->all();
$count = $schema->resultset('Year2000CDs')
->search({}, { bind => [2001] })
->count();
=head1 SQL EXAMPLES
=over
=item is_virtual set to false
$schema->resultset('Year2000CDs')->all();
SELECT cdid, artist, title FROM year2000cds me
=item is_virtual set to true
$schema->resultset('Year2000CDs')->all();
SELECT cdid, artist, title FROM
(SELECT cdid, artist, title FROM cd WHERE year ='2000') me
=back
=head1 METHODS
=head2 is_virtual
__PACKAGE__->result_source_instance->is_virtual(1);
Set to true for a virtual view, false or unset for a real
database-based view.
=head2 view_definition
__PACKAGE__->result_source_instance->view_definition(
"SELECT cdid, artist, title FROM cd WHERE year ='2000'"
);
An SQL query for your view. Will not be translated across database
syntaxes.
=head2 deploy_depends_on
__PACKAGE__->result_source_instance->deploy_depends_on(
["MyApp::Schema::Result::Year","MyApp::Schema::Result::CD"]
);
Specify the views (and only the views) that this view depends on.
Pass this an array reference of fully qualified result classes.
=head1 OVERRIDDEN METHODS
=head2 from
Returns the FROM entry for the table (i.e. the view name)
or the SQL as a subselect if this is a virtual view.
=cut
sub from {
    my $self = shift;

    # A virtual view has no database-side object: hand back the view SQL
    # as a parenthesized subselect (scalar-ref form, which the SQL maker
    # passes through verbatim).
    if ( $self->is_virtual ) {
        my $subselect = '(' . $self->view_definition . ')';
        return \$subselect;
    }

    # A deployed (non-virtual) view is addressed by name, like a table.
    return $self->name;
}
=head1 OTHER METHODS
=head2 new
The constructor.
=cut
sub new {
    my ( $self, @args ) = @_;
    my $new = $self->next::method(@args);

    # Normalize deploy_depends_on: callers supply an arrayref of result
    # class names, but internally it is stored as a set (name => 1 hash)
    # for cheap membership checks. Leave it alone if already a hash.
    if ( ref $new->{deploy_depends_on} ne 'HASH' ) {
        my %dep_set = map { ( $_ => 1 ) } @{ $new->{deploy_depends_on} || [] };
        $new->{deploy_depends_on} = \%dep_set;
    }

    return $new;
}
=head1 FURTHER QUESTIONS?
Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
=head1 COPYRIGHT AND LICENSE
This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
redistribute it and/or modify it under the same terms as the
L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
=cut
1;

View File

@@ -0,0 +1,172 @@
=for comment POD_DERIVED_INDEX_GENERATED
The following documentation is automatically generated. Please do not edit
this file, but rather the original, inline with DBIx::Class::ResultSource::View
at lib/DBIx/Class/ResultSource/View.pm
(on the system that originally ran this).
If you do edit this file, and don't want your changes to be removed, make
sure you change the first line.
=cut
=head1 NAME
DBIx::Class::ResultSource::View - ResultSource object representing a view
=head1 SYNOPSIS
package MyApp::Schema::Result::Year2000CDs;
use base qw/DBIx::Class::Core/;
__PACKAGE__->table_class('DBIx::Class::ResultSource::View');
__PACKAGE__->table('year2000cds');
__PACKAGE__->result_source_instance->is_virtual(1);
__PACKAGE__->result_source_instance->view_definition(
"SELECT cdid, artist, title FROM cd WHERE year ='2000'"
);
__PACKAGE__->add_columns(
'cdid' => {
data_type => 'integer',
is_auto_increment => 1,
},
'artist' => {
data_type => 'integer',
},
'title' => {
data_type => 'varchar',
size => 100,
},
);
=head1 DESCRIPTION
View object that inherits from L<DBIx::Class::ResultSource>
This class extends ResultSource to add basic view support.
A view has a L</view_definition>, which contains a SQL query. The query can
only have parameters if L</is_virtual> is set to true. It may contain JOINs,
sub selects and any other SQL your database supports.
View definition SQL is deployed to your database on
L<DBIx::Class::Schema/deploy> unless you set L</is_virtual> to true.
Deploying the view does B<not> translate it between different database
syntaxes, so be careful what you write in your view SQL.
Virtual views (L</is_virtual> true), are assumed to not
exist in your database as a real view. The L</view_definition> in this
case replaces the view name in a FROM clause in a subselect.
=head1 EXAMPLES
Having created the MyApp::Schema::Year2000CDs schema as shown in the SYNOPSIS
above, you can then:
$2000_cds = $schema->resultset('Year2000CDs')
->search()
->all();
$count = $schema->resultset('Year2000CDs')
->search()
->count();
If you modified the schema to include a placeholder
__PACKAGE__->result_source_instance->view_definition(
"SELECT cdid, artist, title FROM cd WHERE year = ?"
);
and ensuring you have is_virtual set to true:
__PACKAGE__->result_source_instance->is_virtual(1);
You could now say:
$2001_cds = $schema->resultset('Year2000CDs')
->search({}, { bind => [2001] })
->all();
$count = $schema->resultset('Year2000CDs')
->search({}, { bind => [2001] })
->count();
=head1 SQL EXAMPLES
=over 4
=item is_virtual set to false
$schema->resultset('Year2000CDs')->all();
SELECT cdid, artist, title FROM year2000cds me
=item is_virtual set to true
$schema->resultset('Year2000CDs')->all();
SELECT cdid, artist, title FROM
(SELECT cdid, artist, title FROM cd WHERE year ='2000') me
=back
=head1 METHODS
=head2 is_virtual
__PACKAGE__->result_source_instance->is_virtual(1);
Set to true for a virtual view, false or unset for a real
database-based view.
=head2 view_definition
__PACKAGE__->result_source_instance->view_definition(
"SELECT cdid, artist, title FROM cd WHERE year ='2000'"
);
An SQL query for your view. Will not be translated across database
syntaxes.
=head2 deploy_depends_on
__PACKAGE__->result_source_instance->deploy_depends_on(
["MyApp::Schema::Result::Year","MyApp::Schema::Result::CD"]
);
Specify the views (and only the views) that this view depends on.
Pass this an array reference of fully qualified result classes.
=head1 OVERRIDDEN METHODS
=head2 from
Returns the FROM entry for the table (i.e. the view name)
or the SQL as a subselect if this is a virtual view.
=head1 OTHER METHODS
=head2 new
The constructor.
=head1 INHERITED METHODS
=over 4
=item L<DBIx::Class::ResultSource>
L<add_column|DBIx::Class::ResultSource/add_column>, L<add_columns|DBIx::Class::ResultSource/add_columns>, L<add_relationship|DBIx::Class::ResultSource/add_relationship>, L<add_unique_constraint|DBIx::Class::ResultSource/add_unique_constraint>, L<add_unique_constraints|DBIx::Class::ResultSource/add_unique_constraints>, L<column_info|DBIx::Class::ResultSource/column_info>, L<column_info_from_storage|DBIx::Class::ResultSource/column_info_from_storage>, L<columns|DBIx::Class::ResultSource/columns>, L<columns_info|DBIx::Class::ResultSource/columns_info>, L<default_sqlt_deploy_hook|DBIx::Class::ResultSource/default_sqlt_deploy_hook>, L<handle|DBIx::Class::ResultSource/handle>, L<has_column|DBIx::Class::ResultSource/has_column>, L<has_relationship|DBIx::Class::ResultSource/has_relationship>, L<name|DBIx::Class::ResultSource/name>, L<name_unique_constraint|DBIx::Class::ResultSource/name_unique_constraint>, L<primary_columns|DBIx::Class::ResultSource/primary_columns>, L<related_class|DBIx::Class::ResultSource/related_class>, L<related_source|DBIx::Class::ResultSource/related_source>, L<relationship_info|DBIx::Class::ResultSource/relationship_info>, L<relationships|DBIx::Class::ResultSource/relationships>, L<remove_column|DBIx::Class::ResultSource/remove_column>, L<remove_columns|DBIx::Class::ResultSource/remove_columns>, L<result_class|DBIx::Class::ResultSource/result_class>, L<resultset|DBIx::Class::ResultSource/resultset>, L<resultset_attributes|DBIx::Class::ResultSource/resultset_attributes>, L<resultset_class|DBIx::Class::ResultSource/resultset_class>, L<reverse_relationship_info|DBIx::Class::ResultSource/reverse_relationship_info>, L<schema|DBIx::Class::ResultSource/schema>, L<sequence|DBIx::Class::ResultSource/sequence>, L<set_primary_key|DBIx::Class::ResultSource/set_primary_key>, L<source_info|DBIx::Class::ResultSource/source_info>, L<source_name|DBIx::Class::ResultSource/source_name>, L<sqlt_deploy_callback|DBIx::Class::ResultSource/sqlt_deploy_callback>, 
L<storage|DBIx::Class::ResultSource/storage>, L<throw_exception|DBIx::Class::ResultSource/throw_exception>, L<unique_constraint_columns|DBIx::Class::ResultSource/unique_constraint_columns>, L<unique_constraint_names|DBIx::Class::ResultSource/unique_constraint_names>, L<unique_constraints|DBIx::Class::ResultSource/unique_constraints>
=back
=head1 FURTHER QUESTIONS?
Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
=head1 COPYRIGHT AND LICENSE
This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
redistribute it and/or modify it under the same terms as the
L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.