# Counter used by _generate_schema() to mint a unique package name for every
# generated schema class (DBIx::Class::Fixtures::GeneratedSchema_<n>).
our $namespace_counter = 0;

# Simple read/write accessors for the object's attributes.  NOTE(review):
# ignore_sql_errors is stored in the hash by new() but is NOT in this list —
# it is read via direct hash access ($self->{ignore_sql_errors}) elsewhere.
__PACKAGE__->mk_group_accessors( 'simple' => qw/config_dir
    _inherited_attributes debug schema_class dumped_objects config_attrs/ );

our $VERSION = '1.001039';
# Numify the version string (standard CPAN idiom so underscore-style dev
# versions compare numerically).
$VERSION = eval $VERSION;
# Construct a fixtures object.
#
# Takes a single hashref of options.  config_dir is mandatory and must name
# an existing directory; it is wrapped in an IO::All dir object.  Optional
# keys: debug, ignore_sql_errors, use_create, use_find_or_create,
# config_attrs.  Throws DBIx::Class::Exception on bad arguments.
sub new {
    my $class = shift;
    my ($params) = @_;

    return DBIx::Class::Exception->throw(
        'first arg to DBIx::Class::Fixtures->new() must be hash ref')
        if ref $params ne 'HASH';

    return DBIx::Class::Exception->throw('config_dir param not specified')
        if !$params->{config_dir};

    my $config_dir = io->dir($params->{config_dir});

    return DBIx::Class::Exception->throw('config_dir directory doesn\'t exist')
        if !-e $params->{config_dir};

    my %attrs = (
        config_dir => $config_dir,
        # Attribute names that dump_object() propagates from a parent set
        # into the sets of fetched/related sources.
        _inherited_attributes =>
            [qw/datetime_relative might_have rules belongs_to/],
        debug              => $params->{debug} || 0,
        ignore_sql_errors  => $params->{ignore_sql_errors},
        dumped_objects     => {},
        use_create         => $params->{use_create} || 0,
        use_find_or_create => $params->{use_find_or_create} || 0,
        config_attrs       => $params->{config_attrs} || {},
    );

    return bless \%attrs, $class;
}
# File-scoped cache of discovered config-set filenames, shared only with
# available_config_sets() below.
my @config_sets;

# Return the filenames of all config sets available under config_dir.
# A config set is any plain file whose name ends in "json".  The result is
# computed once and memoized in @config_sets for subsequent calls.
sub available_config_sets {
    my $self = shift;

    unless (@config_sets) {
        @config_sets =
            map  { $_->filename }
            grep { -f "$_" && $_ =~ /json$/ }
            $self->config_dir->all;
    }
    return @config_sets;
}
# Dump fixtures to disk.
#
# $params must be a hashref with at least 'schema' (a connected DBIC schema)
# and 'directory' (output path).  Either 'config' (a hashref, or a filename
# relative to config_dir) or 'all' must be supplied; 'excludes' is only
# honoured together with 'all'.  Rows are written into a fresh temp dir and
# the temp dir is then copied over the output dir.  Returns 1 on success;
# throws DBIx::Class::Exception on invalid arguments.
sub dump {
    my $self = shift;
    my ($params) = @_;

    unless (ref $params eq 'HASH') {
        return DBIx::Class::Exception->throw('first arg to dump must be hash ref');
    }

    foreach my $param (qw/schema directory/) {
        unless ($params->{$param}) {
            return DBIx::Class::Exception->throw($param . ' param not specified');
        }
    }

    if ($params->{excludes} && !$params->{all}) {
        return DBIx::Class::Exception->throw(
            "'excludes' param only works when using the 'all' param");
    }

    my $schema = $params->{schema};
    my $config;
    if ($params->{config}) {
        # 'config' is either an inline hashref or the name of a JSON file
        # under config_dir (loaded, with includes merged, by load_config_file).
        $config = ref $params->{config} eq 'HASH'
            ? $params->{config}
            : do {
                my $config_file = io->catfile($self->config_dir, $params->{config});
                $self->load_config_file("$config_file");
            };
    }
    elsif ($params->{all}) {
        # Synthesize a config covering every source in the schema (minus
        # excludes), with relationship traversal disabled.
        my %excludes = map { $_ => 1 } @{ $params->{excludes} || [] };
        $config = {
            might_have => { fetch => 0 },
            has_many   => { fetch => 0 },
            belongs_to => { fetch => 0 },
            sets => [
                map { { class => $_, quantity => 'all' }; }
                grep { !$excludes{$_} } $schema->sources
            ],
        };
    }
    else {
        DBIx::Class::Exception->throw('must pass config or set all');
    }

    my $output_dir = io->dir($params->{directory});
    unless (-e "$output_dir") {
        # NOTE(review): the message is misleading here — mkpath just failed,
        # so the directory could not be *created*.
        $output_dir->mkpath ||
            DBIx::Class::Exception->throw("output directory does not exist at $output_dir");
    }

    $self->msg("generating fixtures");

    # Stage everything in a temp dir so a failed dump never corrupts an
    # existing fixture directory.
    my $tmp_output_dir = io->dir(tempdir);
    if (-e "$tmp_output_dir") {
        $self->msg("- clearing existing $tmp_output_dir");
        $tmp_output_dir->rmtree;
    }
    $self->msg("- creating $tmp_output_dir");
    $tmp_output_dir->mkpath;

    # Record the dumper version and the effective config alongside the data;
    # populate() later evals _config_set back in.
    $tmp_output_dir->file('_dumper_version')->print($VERSION);
    $tmp_output_dir->file('_config_set')->print( Dumper $config );

    $config->{rules} ||= {};
    my @sources = @{ delete $config->{sets} };

    # Allow rules to be keyed by class name: rewrite each rule key to the
    # DBIC source_name when the schema resolves it.
    while ( my ($k, $v) = each %{ $config->{rules} } ) {
        if ( my $source = eval { $schema->source($k) } ) {
            $config->{rules}{ $source->source_name } = $v;
        }
    }

    foreach my $source (@sources) {
        # Merge any per-class rule into the set definition.
        my $rule = $config->{rules}->{ $source->{class} };
        $source = merge( $source, $rule ) if ($rule);

        my $rs = $schema->resultset( $source->{class} );

        if ($source->{cond} and ref $source->{cond} eq 'HASH') {
            # A condition value beginning with "\" marks a literal SQL
            # fragment: strip the backslash and pass a scalar ref through
            # to DBIC.  The s/// mutates the stored value in place.
            $source->{cond} = {
                map {
                    $_ => ( $source->{cond}->{$_} =~ s/^\\// )
                        ? \$source->{cond}->{$_}
                        : $source->{cond}->{$_}
                } keys %{ $source->{cond} }
            };
        }

        $rs = $rs->search( $source->{cond}, { join => $source->{join} } )
            if $source->{cond};

        $self->msg("- dumping $source->{class}");

        # The per-source dump options: the whole config overlaid with the
        # set definition itself.
        my %source_options = ( set => { %{$config}, %{$source} } );

        if ($source->{quantity}) {
            $rs = $rs->search({}, { order_by => $source->{order_by} })
                if $source->{order_by};

            if ($source->{quantity} =~ /^\d+$/) {
                $rs = $rs->search({}, { rows => $source->{quantity} });
            }
            elsif ($source->{quantity} ne 'all') {
                DBIx::Class::Exception->throw(
                    "invalid value for quantity - $source->{quantity}");
            }
        }
        elsif ($source->{ids} && @{ $source->{ids} }) {
            # Explicit primary-key list; only single-column PKs supported.
            my @ids = @{ $source->{ids} };
            my (@pks) = $rs->result_source->primary_columns;
            die "Can't dump multiple col-pks using 'id' option" if @pks > 1;

            $rs = $rs->search_rs( { $pks[0] => { -in => \@ids } } );
        }
        else {
            DBIx::Class::Exception->throw('must specify either quantity or ids');
        }

        $source_options{set_dir} = $tmp_output_dir;
        $self->dump_rs( $rs, \%source_options );
    }

    # Clear stale fixtures from the target: remove any subdirectory that
    # contains .fix files, and the old _dumper_version marker.
    foreach my $child ($output_dir->all) {
        if ($child->is_dir) {
            next if ("$child" eq "$tmp_output_dir");
            if (grep { $_ =~ /\.fix/ } $child->all) {
                $child->rmtree;
            }
        }
        elsif ($child =~ /_dumper_version$/) {
            $child->unlink;
        }
    }

    $self->msg("- moving temp dir to $output_dir");
    $tmp_output_dir->copy("$output_dir");

    if (-e "$output_dir") {
        $self->msg("- clearing tmp dir $tmp_output_dir");
        $tmp_output_dir->rmtree;
    }

    $self->msg("done");

    return 1;
}
# Load a JSON config file, resolve its 'includes', and apply defaults.
#
# 'includes' must be an arrayref of { file => ... } hashrefs; each named file
# (relative to config_dir) is loaded and merged into the config in order.
# After merging, the config must contain a non-empty 'sets' arrayref.
# Defaults applied only when the key is absent: might_have/has_many fetch 0,
# belongs_to fetch 1 (belongs_to is followed by default so FK targets get
# dumped too).  Returns the config hashref; throws on any validation failure.
sub load_config_file {
    my ($self, $config_file) = @_;
    DBIx::Class::Exception->throw("config does not exist at $config_file")
        unless -e "$config_file";

    my $config = Config::Any::JSON->load($config_file);

    # process includes
    if (my $incs = $config->{includes}) {
        $self->msg($incs);
        DBIx::Class::Exception->throw(
            'includes params of config must be an array ref of hashrefs')
            unless ref $incs eq 'ARRAY';

        foreach my $include_config (@$incs) {
            DBIx::Class::Exception->throw(
                'includes params of config must be an array ref of hashrefs')
                unless (ref $include_config eq 'HASH') && $include_config->{file};

            my $include_file = $self->config_dir->file($include_config->{file});

            DBIx::Class::Exception->throw("config does not exist at $include_file")
                unless -e "$include_file";

            my $include = Config::Any::JSON->load($include_file);
            $self->msg($include);
            # Hash::Merge semantics — later includes are merged into the
            # accumulated config.
            $config = merge( $config, $include );
        }
        delete $config->{includes};
    }

    return DBIx::Class::Exception->throw('config has no sets')
        unless $config && $config->{sets} &&
               ref $config->{sets} eq 'ARRAY' && scalar @{ $config->{sets} };

    $config->{might_have} = { fetch => 0 } unless exists $config->{might_have};
    $config->{has_many}   = { fetch => 0 } unless exists $config->{has_many};
    $config->{belongs_to} = { fetch => 1 } unless exists $config->{belongs_to};

    return $config;
}
# Dump every row of a resultset using the supplied dump options.
# $opts is the per-source options hash built by dump() (must contain
# set / set_dir — validated downstream by dump_object).
sub dump_rs {
    my ($self, $resultset, $opts) = @_;

    while (my $row = $resultset->next) {
        $self->dump_object($row, $opts);
    }
}
# Serialize a single row object to a .fix file and recurse into its
# relationships according to $params->{set}.
#
# The set is first run through a Data::Visitor::Callback that expands
# __ENV(...)__, __ATTR(...)__, __catfile(...)__ and __catdir(...)__
# placeholders in all plain string values.  Each row is dumped at most once
# per source (tracked in $self->dumped_objects keyed by "\0"-joined PK
# values); relationship traversal still runs for already-dumped rows'
# explicit 'fetch' entries.
sub dump_object {
    my ($self, $object, $params) = @_;
    my $set = $params->{set};

    my $v = Data::Visitor::Callback->new(
        plain_value => sub {
            my ($visitor, $data) = @_;
            # Placeholder handlers.  Each receives ($self, args...) — the
            # outer $self is closed over when the substitution runs below.
            my $subs = {
                ENV => sub {
                    my ($self, $v) = @_;
                    if (!defined($ENV{$v})) {
                        return "";
                    }
                    else {
                        return $ENV{$v};
                    }
                },
                ATTR => sub {
                    # Look up a value from the config_attrs passed to new().
                    my ($self, $v) = @_;
                    if (my $attr = $self->config_attrs->{$v}) {
                        return $attr;
                    }
                    else {
                        return "";
                    }
                },
                catfile => sub {
                    # Stringified immediately ("" .) so a path string, not an
                    # IO::All object, lands in the set.
                    my ($self, @args) = @_;
                    "" . io->catfile(@args);
                },
                catdir => sub {
                    my ($self, @args) = @_;
                    "" . io->catdir(@args);
                },
            };

            # Replace every __NAME__ or __NAME(arg1,arg2)__ occurrence with
            # the handler's return value; args are comma-split.
            my $subsre = join('|', keys %$subs);
            $_ =~ s{__($subsre)(?:\((.+?)\))?__}{
                $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;

            return $_;
        }
    );
    $v->visit($set);

    die 'no dir passed to dump_object' unless $params->{set_dir};
    die 'no object passed to dump_object' unless $object;

    my @inherited_attrs = @{ $self->_inherited_attributes };

    my @pk_vals = map { $object->get_column($_) } $object->primary_columns;

    my $key = join("\0", @pk_vals);

    my $src = $object->result_source;
    # Post-increment: $exists is true iff this row was already dumped.
    my $exists = $self->dumped_objects->{ $src->name }{$key}++;

    # write dir and file
    my $source_dir = io->catdir($params->{set_dir}, $self->_name_for_source($src));
    $source_dir->mkpath(0, 0777);

    # The s/// below aliases and so *mutates* @pk_vals, replacing characters
    # that are unsafe in filenames with underscores.
    my $file = io->catfile("$source_dir",
        join('-', map { s|[/\\:\*\|\?"<>]|_|g; $_; } @pk_vals) . '.fix');

    unless ($exists) {
        $self->msg('-- dumping ' . "$file", 2);

        my $col_info = $src->columns_info;
        my @column_names = keys %$col_info;
        my %columns = $object->get_columns;
        my %ds;
        @ds{@column_names} = @columns{@column_names};

        if ($set->{external}) {
            # Externally-stored data (e.g. files on disk): back it up via the
            # configured class and store it base64-encoded under 'external'.
            foreach my $field (keys %{ $set->{external} }) {
                # NOTE: this $key (the field's column value) shadows the
                # PK-based $key declared above.
                my $key = $ds{$field};
                my ($plus, $class) =
                    ( $set->{external}->{$field}->{class} =~ /^(\+)*(.+)$/ );
                my $args = $set->{external}->{$field}->{args};

                # A leading '+' means the class name is used verbatim;
                # otherwise it is namespaced under ...::External::.
                $class = "DBIx::Class::Fixtures::External::$class" unless $plus;
                # NOTE(review): string eval to load the class at runtime —
                # $class comes from the config file.
                eval "use $class";

                $ds{external}->{$field} =
                    encode_base64( $class->backup($key => $args), '' );
            }
        }

        # hydrate if the set requests relative datetimes
        if ($set->{datetime_relative}) {
            my $formatter =
                eval { $object->result_source->schema->storage->datetime_parser };
            unless (!$formatter) {
                my $dt;
                if ($set->{datetime_relative} eq 'today') {
                    $dt = DateTime->today;
                }
                else {
                    # NOTE(review): guards on $@ from the eval above.
                    $dt = $formatter->parse_datetime($set->{datetime_relative})
                        unless ($@);
                }

                # Convert every inflatable date/time column to a duration
                # relative to $dt, so populate() can re-anchor it later.
                while (my ($col, $value) = each %ds) {
                    my $col_info = $object->result_source->column_info($col);

                    next unless $value
                        && $col_info->{_inflate_info}
                        && (
                            (uc($col_info->{data_type}) eq 'DATETIME')
                            or (uc($col_info->{data_type}) eq 'DATE')
                            or (uc($col_info->{data_type}) eq 'TIME')
                            or (uc($col_info->{data_type}) eq 'TIMESTAMP')
                            or (uc($col_info->{data_type}) eq 'INTERVAL')
                        );

                    $ds{$col} =
                        $object->get_inflated_column($col)->subtract_datetime($dt);
                }
            }
            else {
                warn "datetime_relative not supported for this db driver at the moment";
            }
        }

        # Serialize with YAML-style Dump (Data::Dump::Streamer, per Out()).
        my $serialized = Dump(\%ds)->Out();
        $file->print($serialized);
    }

    # Effective fetch flags: the set-wide flag, or a per-source rule flag.
    my ($might_have, $belongs_to, $has_many) = map {
        $set->{$_}{fetch} || $set->{rules}{ $src->source_name }{$_}{fetch}
    } qw/might_have belongs_to has_many/;

    return unless $might_have || $belongs_to || $has_many || $set->{fetch};

    # dump related objects (only the first time we see this row)
    unless ($exists) {
        foreach my $name (sort $src->relationships) {
            my $info = $src->relationship_info($name);
            my $r_source = $src->related_source($name);

            # Traverse: non-optional single rels always; optional singles
            # only when might_have is on; 'filter' always; multi rels only
            # when has_many is on.
            if (
                ($info->{attrs}{accessor} eq 'single'
                    && (!$info->{attrs}{join_type} || $might_have))
                || $info->{attrs}{accessor} eq 'filter'
                || ($info->{attrs}{accessor} eq 'multi' && $has_many)
            ) {
                my $related_rs = $object->related_resultset($name);
                my $rule = $set->{rules}->{ $related_rs->result_source->source_name };

                # Per-source rules can constrain multi-relationship dumps.
                if ($rule && $info->{attrs}{accessor} eq 'multi') {
                    $related_rs = $related_rs->search(
                        $rule->{cond}, { join => $rule->{join} } )
                        if ($rule->{cond});

                    $related_rs = $related_rs->search(
                        {}, { rows => $rule->{quantity} } )
                        if ($rule->{quantity} && $rule->{quantity} ne 'all');

                    $related_rs = $related_rs->search(
                        {}, { order_by => $rule->{order_by} } )
                        if ($rule->{order_by});
                }

                if ($set->{has_many}{quantity}
                    && $set->{has_many}{quantity} =~ /^\d+$/) {
                    $related_rs = $related_rs->search(
                        {}, { rows => $set->{has_many}->{quantity} } );
                }

                # Child set: only the inherited attributes carry over, plus
                # the rule when it requests further fetching.
                my %c_params = %{$params};
                my %mock_set = map { $_ => $set->{$_} }
                    grep { $set->{$_} } @inherited_attrs;
                $c_params{set} = \%mock_set;
                $c_params{set} = merge( $c_params{set}, $rule )
                    if $rule && $rule->{fetch};

                $self->dump_rs( $related_rs, \%c_params );
            }
        }
    }

    return unless $set && $set->{fetch};

    # dump just explicitly requested related objects
    foreach my $fetch (@{ $set->{fetch} }) {
        # inherit date param and friends from the parent set where the fetch
        # entry does not override them
        $fetch->{$_} = $set->{$_}
            foreach grep { !$fetch->{$_} && $set->{$_} } @inherited_attrs;

        my $related_rs = $object->related_resultset($fetch->{rel});
        my $rule = $set->{rules}->{ $related_rs->result_source->source_name };

        if ($rule) {
            my $info = $object->result_source->relationship_info($fetch->{rel});
            if ($info->{attrs}{accessor} eq 'multi') {
                $fetch = merge( $fetch, $rule );
            }
            elsif ($rule->{fetch}) {
                $fetch = merge( $fetch, { fetch => $rule->{fetch} } );
            }
        }

        die "relationship $fetch->{rel} does not exist for " . $src->source_name
            unless ($related_rs);

        if ($fetch->{cond} and ref $fetch->{cond} eq 'HASH') {
            # As in dump(): values beginning with "\" become literal SQL
            # (passed as scalar refs after stripping the backslash in place).
            $fetch->{cond} = {
                map {
                    $_ => ($fetch->{cond}->{$_} =~ s/^\\//)
                        ? \$fetch->{cond}->{$_}
                        : $fetch->{cond}->{$_}
                } keys %{ $fetch->{cond} }
            };
        }

        $related_rs = $related_rs->search(
            $fetch->{cond}, { join => $fetch->{join} } )
            if $fetch->{cond};

        $related_rs = $related_rs->search(
            {}, { rows => $fetch->{quantity} } )
            if $fetch->{quantity} && $fetch->{quantity} ne 'all';

        $related_rs = $related_rs->search(
            {}, { order_by => $fetch->{order_by} } )
            if $fetch->{order_by};

        # Recurse with the fetch entry as the new set.
        $self->dump_rs( $related_rs, { %{$params}, set => $fetch } );
    }
}
# Build a fresh schema connection and deploy a DDL file into it.
#
# Loads schema_class (default DBIx::Class::Fixtures::Schema), injects it as
# the base of a uniquely-numbered generated package, connects with
# $params->{connection_details}, drops every existing table (inside a txn
# with FK checks deferred), runs the statements from $params->{ddl}, then
# returns a *second* fresh connection (new namespace) so the returned schema
# has no state from the drop/deploy phase.
sub _generate_schema {
    my $self = shift;
    my $params = shift || {};
    $self->msg("\ncreating schema");

    my $schema_class = $self->schema_class || "DBIx::Class::Fixtures::Schema";
    # NOTE(review): string eval to require a class named at runtime.
    eval "require $schema_class";
    die $@ if $@;

    my $pre_schema;
    my $connection_details = $params->{connection_details};

    # Unique namespace per connection so repeated calls never collide.
    $namespace_counter++;

    my $namespace = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
    Class::C3::Componentised->inject_base( $namespace => $schema_class );

    $pre_schema = $namespace->connect(@{$connection_details});
    unless ($pre_schema) {
        return DBIx::Class::Exception->throw('connection details not valid');
    }
    my @tables = map { $self->_name_for_source($pre_schema->source($_)) }
        $pre_schema->sources;
    $self->msg("Tables to drop: [" . join(', ', sort @tables) . "]");
    my $dbh = $pre_schema->storage->dbh;

    # clear existing tables; per-table failures are deliberately swallowed
    # by the eval (a table may simply not exist yet).
    $self->msg("- clearing DB of existing tables");
    $pre_schema->storage->txn_do(
        sub {
            $pre_schema->storage->with_deferred_fk_checks(
                sub {
                    foreach my $table (@tables) {
                        eval {
                            $dbh->do("drop table $table"
                                . ($params->{cascade} ? ' cascade' : ''))
                        };
                    }
                });
        });

    # import new ddl file to db
    my $ddl_file = $params->{ddl};
    $self->msg("- deploying schema using $ddl_file");
    my $data = _read_sql($ddl_file);
    foreach (@$data) {
        eval { $dbh->do($_) or warn "SQL was:\n $_" };
        # ignore_sql_errors is read by direct hash access (no accessor).
        if ($@ && !$self->{ignore_sql_errors}) {
            die "SQL was:\n $_\n$@";
        }
    }
    $self->msg("- finished importing DDL into DB");

    # Return a clean second connection in yet another namespace.
    $namespace_counter++;
    my $namespace2 = "DBIx::Class::Fixtures::GeneratedSchema_$namespace_counter";
    Class::C3::Componentised->inject_base( $namespace2 => $schema_class );

    my $schema = $namespace2->connect(@{$connection_details});
    return $schema;
}
# Read a DDL/SQL file and split it into individual statements.
#
# Full lines starting with "--" are dropped, the remaining lines are joined
# WITHOUT newlines (so each statement ends up on one line), and the text is
# split on ';'.  A final pass removes empty fragments and "-- "-prefixed
# leftovers.  Returns an arrayref of statement strings; dies if the file
# cannot be opened.
sub _read_sql {
    my $ddl_file = shift;

    # Three-arg open with a lexical handle: the old two-arg form
    # ("<$ddl_file") allowed mode characters in the filename to change the
    # open mode.
    open my $fh, '<', $ddl_file
        or die("Can't open DDL file, $ddl_file ($!)");

    my @data = split(/\n/, join('', <$fh>));
    @data = grep(!/^--/, @data);
    @data = split(/;/, join('', @data));
    close($fh);
    @data = grep { $_ && $_ !~ /^-- / } @data;
    return \@data;
}
# Run dump() once per config set.
#
# $params->{configs} is an arrayref of config-set names/hashes;
# $params->{directory_template} is a required coderef called as
# ($self, $localparams, $set) to compute each set's output directory.
# The dumped-objects cache is reset between sets so each set dumps a
# complete fixture tree.  Throws when directory_template is missing.
sub dump_config_sets {
    my ($self, $params) = @_;
    my $available_config_sets = delete $params->{configs};
    my $directory_template = delete $params->{directory_template} ||
      DBIx::Class::Exception->throw("'directory_template' is a required parameter");

    for my $set (@$available_config_sets) {
        # Shallow-copy the shared params so the per-set 'directory' and
        # 'config' keys do not leak into the caller's hash (the old code
        # aliased $params here, mutating it on every iteration).
        my $localparams = { %$params };
        $localparams->{directory} = $directory_template->($self, $localparams, $set);
        $localparams->{config} = $set;
        $self->dump($localparams);
        # Forget what was dumped so the next set starts fresh.
        $self->dumped_objects({});
    }
}
# Convenience wrapper: dump every config set found in config_dir.
# Passes the caller's params through to dump_config_sets() with 'configs'
# filled in from available_config_sets().
sub dump_all_config_sets {
    my ($self, $params) = @_;

    my @configs = $self->available_config_sets;
    return $self->dump_config_sets({
        %$params,
        configs => [@configs],
    });
}
# Load fixtures from a directory back into a database.
#
# $params must be a hashref with 'directory' plus either ddl+
# connection_details (schema is deployed via _generate_schema) or
# schema+no_deploy (an existing connected schema is used).  Honours
# no_populate, use_create, use_find_or_create, datetime_relative_to and
# post_ddl.  Fixture files are staged into a temp dir, inserted inside one
# transaction with FK checks deferred, then the temp dir is removed.
# Returns 1 on success.
sub populate {
    my $self = shift;
    my ($params) = @_;
    DBIx::Class::Exception->throw('first arg to populate must be hash ref')
        unless ref $params eq 'HASH';

    DBIx::Class::Exception->throw('directory param not specified')
        unless $params->{directory};

    my $fixture_dir = io->dir(delete $params->{directory});
    DBIx::Class::Exception->throw("fixture directory '$fixture_dir' does not exist")
        unless -d "$fixture_dir";

    my $ddl_file;
    my $dbh;   # NOTE(review): declared but never used in this sub
    my $schema;

    if ($params->{ddl} && $params->{connection_details}) {
        $ddl_file = io->file(delete $params->{ddl});
        unless (-e "$ddl_file") {
            return DBIx::Class::Exception->throw('DDL does not exist at ' . $ddl_file);
        }
        unless (ref $params->{connection_details} eq 'ARRAY') {
            return DBIx::Class::Exception->throw('connection details must be an arrayref');
        }
        $schema = $self->_generate_schema({
            ddl => "$ddl_file",
            connection_details => delete $params->{connection_details},
            %{$params}
        });
    }
    elsif ($params->{schema} && $params->{no_deploy}) {
        $schema = $params->{schema};
    }
    else {
        DBIx::Class::Exception->throw('you must set the ddl and connection_details params');
    }

    return 1 if $params->{no_populate};

    $self->msg("\nimporting fixtures");
    my $tmp_fixture_dir = io->dir(tempdir());
    my $config_set_path = io->file($fixture_dir, '_config_set');
    # _config_set was written by dump() with Data::Dumper, so string-eval'ing
    # its contents repopulates $VAR1.  NOTE(review): evals file contents.
    my $config_set = -e "$config_set_path"
        ? do { my $VAR1; eval($config_set_path->slurp); $VAR1 }
        : '';

    # Same placeholder expansion as in dump_object(): __ENV/ATTR/catfile/
    # catdir__ markers inside the stored config set are resolved here.
    my $v = Data::Visitor::Callback->new(
        plain_value => sub {
            my ($visitor, $data) = @_;
            my $subs = {
                ENV => sub {
                    my ($self, $v) = @_;
                    if (!defined($ENV{$v})) {
                        return "";
                    }
                    else {
                        return $ENV{$v};
                    }
                },
                ATTR => sub {
                    my ($self, $v) = @_;
                    if (my $attr = $self->config_attrs->{$v}) {
                        return $attr;
                    }
                    else {
                        return "";
                    }
                },
                catfile => sub {
                    # NOTE(review): unlike the dump_object() twin, these do
                    # not stringify with "" — an IO::All object is returned.
                    my ($self, @args) = @_;
                    io->catfile(@args);
                },
                catdir => sub {
                    my ($self, @args) = @_;
                    io->catdir(@args);
                },
            };

            my $subsre = join('|', keys %$subs);
            $_ =~ s{__($subsre)(?:\((.+?)\))?__}{
                $subs->{ $1 }->( $self, $2 ? split( /,/, $2 ) : () ) }eg;

            return $_;
        }
    );
    $v->visit($config_set);

    # Index the config's sets by class name for the 'external' restore below.
    my %sets_by_src;
    if ($config_set) {
        %sets_by_src = map { delete($_->{class}) => $_ }
            @{ $config_set->{sets} }
    }

    if (-e "$tmp_fixture_dir") {
        $self->msg("- deleting existing temp directory $tmp_fixture_dir");
        $tmp_fixture_dir->rmtree;
    }
    $self->msg("- creating temp dir");
    $tmp_fixture_dir->mkpath();

    # Copy only the fixture subdirs that correspond to sources in the schema.
    for ( map { $self->_name_for_source($schema->source($_)) } $schema->sources) {
        my $from_dir = io->catdir($fixture_dir, $_);
        next unless -e "$from_dir";
        $from_dir->copy( io->catdir($tmp_fixture_dir, $_) . "" );
    }

    unless (-d "$tmp_fixture_dir") {
        DBIx::Class::Exception->throw(
            "Unable to create temporary fixtures dir: $tmp_fixture_dir: $!");
    }

    my $fixup_visitor;
    my $formatter = $schema->storage->datetime_parser;
    # NOTE(review): $@ here is leftover state from earlier evals — there is
    # no eval directly above; looks vestigial.
    unless ($@ || !$formatter) {
        my %callbacks;
        # Re-anchor DateTime::Duration values (written by dump_object's
        # datetime_relative handling) against a base date.
        if ($params->{datetime_relative_to}) {
            $callbacks{'DateTime::Duration'} = sub {
                $params->{datetime_relative_to}->clone->add_duration($_);
            };
        }
        else {
            $callbacks{'DateTime::Duration'} = sub {
                $formatter->format_datetime(DateTime->today->add_duration($_))
            };
        }
        $callbacks{object} ||= "visit_ref";
        # NOTE(review): indirect object syntax — works, but discouraged.
        $fixup_visitor = new Data::Visitor::Callback(%callbacks);
    }

    # Insert parents before children (FK dependency order).
    my @sorted_source_names = $self->_get_sorted_sources($schema);
    $schema->storage->txn_do(
        sub {
            $schema->storage->with_deferred_fk_checks(
                sub {
                    foreach my $source (@sorted_source_names) {
                        $self->msg("- adding " . $source);
                        my $rs = $schema->resultset($source);
                        my $source_dir = io->catdir($tmp_fixture_dir,
                            $self->_name_for_source($rs->result_source));
                        next unless (-e "$source_dir");
                        my @rows;
                        while (my $file = $source_dir->next) {
                            next unless ($file =~ /\.fix$/);
                            next if $file->is_dir;
                            my $contents = $file->slurp;
                            # .fix files assign to $HASH1 when eval'd.
                            # NOTE(review): string-evals fixture file contents.
                            my $HASH1;
                            eval($contents);
                            $HASH1 = $fixup_visitor->visit($HASH1)
                                if $fixup_visitor;

                            # Restore externally-stored data recorded by
                            # dump_object() under the 'external' key.
                            if (my $external = delete $HASH1->{external}) {
                                my @fields =
                                    keys %{ $sets_by_src{$source}->{external} };
                                foreach my $field (@fields) {
                                    my $key = $HASH1->{$field};
                                    my $content =
                                        decode_base64( $external->{$field} );
                                    my $args = $sets_by_src{$source}
                                        ->{external}->{$field}->{args};
                                    my ($plus, $class) = (
                                        $sets_by_src{$source}->{external}
                                            ->{$field}->{class}
                                            =~ /^(\+)*(.+)$/ );
                                    $class =
                                        "DBIx::Class::Fixtures::External::$class"
                                        unless $plus;
                                    eval "use $class";
                                    $class->restore($key, $content, $args);
                                }
                            }
                            if ($params->{use_create}) {
                                $rs->create($HASH1);
                            }
                            elsif ($params->{use_find_or_create}) {
                                $rs->find_or_create($HASH1);
                            }
                            else {
                                push(@rows, $HASH1);
                            }
                        }
                        $rs->populate(\@rows) if scalar(@rows);

                        # Resync auto-increment sequences with the imported
                        # max values (Postgres-style setval; skipped on
                        # Oracle).
                        my $table = $rs->result_source->name;
                        for my $column (my @columns = $rs->result_source->columns) {
                            my $info = $rs->result_source->column_info($column);

                            if (my $sequence = $info->{sequence}) {
                                $self->msg("- updating sequence $sequence");
                                $rs->result_source->storage->dbh_do(
                                    sub {
                                        my ($storage, $dbh, @cols) = @_;

                                        if ($dbh->{Driver}->{Name} eq "Oracle") {
                                            $self->msg("- Cannot change sequence values in Oracle");
                                        }
                                        else {
                                            $self->msg(
                                                my $sql = sprintf(
                                                    "SELECT setval(?, (SELECT max(%s) FROM %s));",
                                                    $dbh->quote_identifier($column),
                                                    $dbh->quote_identifier($table))
                                            );
                                            my $sth = $dbh->prepare($sql);
                                            $sth->bind_param(1, $sequence);
                                            my $rv = $sth->execute
                                                or die $sth->errstr;
                                            $self->msg("- $sql");
                                        }
                                    });
                            }
                        }
                    }
                });
        });

    $self->do_post_ddl({
        schema   => $schema,
        post_ddl => $params->{post_ddl}
    }) if $params->{post_ddl};

    $self->msg("- fixtures imported");
    $self->msg("- cleaning up");
    $tmp_fixture_dir->rmtree;

    return 1;
}
# Order the schema's source monikers so that FK-referenced ("parent")
# sources come before the sources that reference them.
#
# Builds %tables mapping each moniker to the set of source_names it holds a
# non-deferrable foreign-key constraint against, resolves transitive
# dependency counts via _resolve_deps(), and sorts monikers by dependency
# count (ascending), breaking ties alphabetically.  Returns the sorted list.
sub _get_sorted_sources {
    my ($self, $dbicschema) = @_;

    my %table_monikers = map { $_ => 1 } $dbicschema->sources;

    my %tables;
    foreach my $moniker (sort keys %table_monikers) {
        my $source = $dbicschema->source($moniker);

        my $table_name = $source->name;
        my @primary = $source->primary_columns;
        my @rels = $source->relationships();

        my %created_FK_rels;
        foreach my $rel (sort @rels) {
            my $rel_info = $source->relationship_info($rel);

            # Ignore non-hashref conditions (e.g. coderef conditions).
            next unless ref $rel_info->{cond} eq 'HASH';

            # Extract the local column names from "foreign.col => self.col"
            # style conditions.
            my @keys = map { $rel_info->{cond}->{$_} =~ /^\w+\.(\w+)$/ }
                keys(%{ $rel_info->{cond} });

            # Decide whether this relationship represents an FK constraint:
            # explicit attr wins; 'multi' accessors never do; otherwise it is
            # one when the local keys are NOT this table's primary key.
            my $fk_constraint;
            if (exists $rel_info->{attrs}{is_foreign_key_constraint}) {
                $fk_constraint = $rel_info->{attrs}{is_foreign_key_constraint};
            }
            elsif ($rel_info->{attrs}{accessor}
                   && $rel_info->{attrs}{accessor} eq 'multi') {
                $fk_constraint = 0;
            }
            else {
                $fk_constraint =
                    not $source->_compare_relationship_keys(\@keys, \@primary);
            }
            next unless $fk_constraint;

            my $rel_table = $source->related_source($rel)->source_name;

            # Dedupe: several relationships may express the same FK columns
            # against the same table.
            my $key_test = join("\x00", sort @keys);
            next if $created_FK_rels{$rel_table}->{$key_test};

            if (scalar(@keys)) {
                $created_FK_rels{$rel_table}->{$key_test} = 1;

                # Deferrable constraints and self-references don't force an
                # ordering.
                if (!$rel_info->{attrs}{is_deferrable}
                    and $rel_table ne $table_name) {
                    $tables{$moniker}{$rel_table}++;
                }
            }
        }
        # Every moniker appears in %tables, even with no dependencies.
        $tables{$moniker} = {} unless exists $tables{$moniker};
    }

    # Resolve transitive dependencies for each moniker.
    my $dependencies = {
        map { $_ => _resolve_deps($_, \%tables) } (keys %tables)
    };

    # 'keys' in scalar context (forced by <=>) yields the dependency count.
    return sort {
        keys %{ $dependencies->{$a} || {} } <=> keys %{ $dependencies->{$b} || {} }
            || $a cmp $b
    } (keys %tables);
}
# Recursively compute the transitive dependencies of $node in $graph.
#
# $graph maps a node to a hash of its direct dependencies; $seen_in is the
# visited map from the caller's path (omitted at the top level).  Returns a
# hashref mapping each reachable dependency to an accumulated count, or an
# empty hashref when a cycle back into the caller's path is detected.
sub _resolve_deps {
    my ($node, $graph, $seen_in) = @_;

    $seen_in ||= {};

    # Clone the caller's visited map with each count bumped, then mark the
    # current node before descending.
    my %seen = map { ( $_ => $seen_in->{$_} + 1 ) } keys %$seen_in;
    $seen{$node} = 1;

    my $depth_of = {};
    foreach my $dep ( keys %{ $graph->{$node} } ) {
        # A dependency already on the caller's path means a cycle: give up
        # on this whole branch.
        return {} if $seen_in->{$dep};

        my $transitive = _resolve_deps( $dep, $graph, \%seen );

        $depth_of->{$_} += $transitive->{$_} for keys %$transitive;
        $depth_of->{$dep}++;
    }

    return $depth_of;
}
# Run the statements from $params->{post_ddl} against $params->{schema}'s
# connection after fixtures have been imported.  Failed statements die with
# the offending SQL unless the object was built with ignore_sql_errors.
sub do_post_ddl {
    my ($self, $params) = @_;

    my $schema     = $params->{schema};
    my $statements = _read_sql($params->{post_ddl});

    foreach my $stmt (@$statements) {
        eval {
            $schema->storage->dbh->do($stmt) or warn "SQL was:\n $stmt"
        };
        # ignore_sql_errors is stored directly in the object hash (no
        # accessor is declared for it).
        if ($@ && !$self->{ignore_sql_errors}) {
            die "SQL was:\n $stmt\n$@";
        }
    }
    $self->msg("- finished importing post-populate DDL into DB");
}
# Print a debug message when the object's debug level is at least $level
# (default 1).  References are dumped with Data::Dumper; plain strings get
# a trailing newline.  Silently returns when $subject is false.
sub msg {
    my ($self, $subject, $level) = @_;

    return unless $subject;
    $level ||= 1;
    return if $self->debug < $level;

    if (ref $subject) {
        print Dumper($subject);
    }
    else {
        print "$subject\n";
    }
}
# Directory name to use for a result source: sources whose ->name is a
# reference (e.g. view/virtual sources) fall back to source_name; otherwise
# the plain table name is used.
sub _name_for_source {
    my ($self, $source) = @_;

    my $table_name = $source->name;
    return ref($table_name) ? $source->source_name : $table_name;
}
1;